Example #1
    def import_cloud_masks(self, override):
        from osgeo import ogr

        files = self._filter("MSK_CLOUDS_B00.gml")

        for f in files:
            safe_dir = os.path.dirname(f).split(os.path.sep)[-4]
            items = safe_dir.split("_")
            map_name = "_".join([items[5], items[2], "MSK", "CLOUDS"])
            # check that the file opens and contains at least one OGR layer
            dsn = ogr.Open(f)
            layer_count = dsn.GetLayerCount() if dsn is not None else 0
            if dsn is not None:
                dsn.Destroy()
            if layer_count < 1:
                gs.info(
                    "No clouds layer found in <{}>. Import skipped".format(f))
                continue
            try:
                gs.run_command(
                    "v.import",
                    input=f,
                    flags="o" if override else None,  # same SRS as data
                    output=map_name,
                    quiet=True,
                )
                gs.vector_history(map_name)
            except CalledModuleError:
                pass  # error already printed by the module
Example #2
def create_db(driver, database):
    subst_database = substitute_db(database)
    if driver == 'dbf':
        path = subst_database
        # check if destination directory exists
        if not os.path.isdir(path):
            # create dbf database
            os.makedirs(path)
            return True
        return False
    
    if driver == 'sqlite':
        path = os.path.dirname(subst_database)
        # check if destination directory exists
        if not os.path.isdir(path):
            os.makedirs(path)
    
    if subst_database in grass.read_command('db.databases', quiet = True,
                                            driver = driver).splitlines():
        return False

    grass.info(_("Target database doesn't exist, "
                 "creating a new database using <%s> driver...") % driver)
    try:
        grass.run_command('db.createdb', driver = driver,
                          database = subst_database)
    except CalledModuleError:
        grass.fatal(_("Unable to create database <%s> by driver <%s>") % \
                        (subst_database, driver))
        
    return False
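
A minimal call-site sketch for the create_db() above; the names are hypothetical, and it assumes an active GRASS session with grass.script imported as grass and substitute_db() defined, as in the module this function comes from:

# Hypothetical usage; the SQLite path is the conventional per-mapset
# database location in GRASS, expanded by substitute_db().
created = create_db('sqlite',
                    '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db')
# create_db() returns True only when a new DBF directory was created;
# with the sqlite driver it returns False and creates the database as
# a side effect.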
Example #3
def copy_tab(from_driver, from_database, from_table, to_driver, to_database,
             to_table):
    if (to_table in gscript.read_command(
            "db.tables",
            quiet=True,
            driver=to_driver,
            database=to_database,
            stderr=nuldev,
    ).splitlines()):
        return False

    gscript.info("Copying table <%s> to target database..." % to_table)
    try:
        gscript.run_command(
            "db.copy",
            from_driver=from_driver,
            from_database=from_database,
            from_table=from_table,
            to_driver=to_driver,
            to_database=to_database,
            to_table=to_table,
        )
    except CalledModuleError:
        gscript.fatal(_("Unable to copy table <%s>") % from_table)

    return True
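
A hedged usage sketch for copy_tab(); the table and database names are hypothetical, and nuldev is assumed to be an open handle to os.devnull, as in the module this function comes from:

import os
nuldev = open(os.devnull, 'w')
# Copy attribute table 'roads' between two SQLite databases
# (illustrative paths); returns False if the target table already
# exists, True after a successful copy.
copied = copy_tab('sqlite', '/data/old/sqlite.db', 'roads',
                  'sqlite', '/data/new/sqlite.db', 'roads')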
Example #4
def download_files(variables, tiles, path):
    """Download files. The download routine is from
    http://stackoverflow.com/questions/22676/
    how-do-i-download-a-file-over-http-using-python"""

    from urllib.request import urlopen
    bs = "http://biogeo.ucdavis.edu/data/climate/worldclim/1_4/tiles/cur/"
    murl = ["{}{}_{}.zip"
            .format(bs, x, y) for x in variables for y in tiles]
    for url in murl:
        file_name = url.split('/')[-1]
        file_path = os.path.join(path, file_name)
        if not os.path.isfile(file_path):
            grass.info("\n")
            u = urlopen(url)
            file_size = int(u.info()["Content-Length"])
            grass.info("Downloading: {} ({:4.1f} Mb)"
                       .format(file_name, file_size / 1000000.))
            file_size_dl = 0
            block_sz = 8192
            with open(file_path, 'wb') as f:
                while True:
                    buffer = u.read(block_sz)
                    if not buffer:
                        break

                    file_size_dl += len(buffer)
                    f.write(buffer)
                    status = "Downloaded: {:3.2f}%".format(
                        file_size_dl * 100. / file_size)
                    print(status, end="\r", flush=True)
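
A sketch of how download_files() might be called; the variable codes and tile numbers are illustrative assumptions about the WorldClim 1.4 tile server, not verified values:

# Hypothetical call: fetch mean-temperature and precipitation tiles
# into /tmp/worldclim (the directory must already exist).
download_files(variables=['tmean', 'prec'], tiles=[16], path='/tmp/worldclim')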
Example #5
    def import_cloud_masks(self):
        from osgeo import ogr

        files = self._filter("MSK_CLOUDS_B00.gml")

        for f in files:
            safe_dir = os.path.dirname(f).split(os.path.sep)[-4]
            items = safe_dir.split('_')
            map_name = '_'.join([items[5], items[2], 'MSK', 'CLOUDS'])
            # check that the file opens and contains at least one OGR layer
            dsn = ogr.Open(f)
            layer_count = dsn.GetLayerCount() if dsn is not None else 0
            if dsn is not None:
                dsn.Destroy()
            if layer_count < 1:
                gs.info(
                    'No clouds layer found in <{}>. Import skipped'.format(f))
                continue
            try:
                gs.run_command(
                    'v.import',
                    input=f,
                    flags='o',  # same SRS as data
                    output=map_name,
                    quiet=True)
                gs.vector_history(map_name)
            except CalledModuleError:
                pass  # error already printed by the module
Example #6
def C_index(n1, n2, txtf):
    """Calculate correlation"""
    corl = gs.read_command("r.covar", quiet=True, flags="r", map=(n1, n2))
    corl = corl.split('N = ')[1]
    corl = float(corl.split(' ')[1])
    gs.info(_("Correlation coeff of {} and {} {}").format(n1.split('@')[0],
            n2.split('@')[0], round(corl, 3)))
    return ["Correlation coeff", n1, n2, corl]
Example #7
def cleanup():
    if len(TMP):
        gs.info(_("Cleaning %d temporary maps...") % len(TMP))
    for rast in TMP:
        gs.run_command('g.remove',
                       type='vector',
                       name=rast,
                       flags='f',
                       quiet=True)
Example #8
def create_index(driver, database, table, index_name, key):
    if driver == 'dbf':
        return False

    grass.info(_("Creating index <%s>...") % index_name)
    if 0 != grass.run_command('db.execute', quiet = True,
                              driver = driver, database = database,
                              sql = "create unique index %s on %s(%s)" % (index_name, table, key)):
        grass.warning(_("Unable to create index <%s>") % index_name)
Example #9
def cleanup():
    if len(TMP):
        gs.info(_("Cleaning %d temporary maps...") % len(TMP))
    for rast in TMP:
        gs.run_command("g.remove",
                       type="vector",
                       name=rast,
                       flags="f",
                       quiet=True)
Example #10
    def printMessage(self, message, type='info'):
        """Call grass message function corresponding to type."""
        if type == 'error':
            grass.error(message)
        elif type == 'warning':
            grass.warning(message)
        elif type == 'info' and grass.gisenv()['GRASS_VERBOSE'] > 0:
            grass.info(message)
        if self.logOutput is True:
            self.__writeLog(message)
Example #11
def create_index(driver, database, table, index_name, key):
    if driver == 'dbf':
        return False

    grass.info(_("Creating index <%s>...") % index_name)
    try:
        grass.run_command('db.execute', quiet = True,
                          driver = driver, database = database,
                          sql = "create unique index %s on %s(%s)" % (index_name, table, key))
    except CalledModuleError:
        grass.warning(_("Unable to create index <%s>") % index_name)
Example #12
def D_index(n1, n2, v1, v2, txtf):
    """Calculate D (Schoener's 1968)"""
    tmpf0 = tmpname("rniche")
    gs.mapcalc("$tmpf0 = abs(double($n1)/$v1 - double($n2)/$v2)",
               tmpf0=tmpf0, n1=n1, v1=v1, n2=n2, v2=v2, quiet=True)
    NO = float(gs.parse_command("r.univar", quiet=True, flags="g",
                                map=tmpf0)['sum'])
    NOV = 1 - (0.5 * NO)
    gs.info(_("Niche overlap (D) of {} and {} {}").format(n1.split('@')[0],
            n2.split('@')[0], round(NOV, 3)))
    return ['Niche overlap (D)', n1, n2, NOV]
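
Dividing each raster by its global sum (v1, v2) turns the maps into proportions, so the mapcalc expression implements Schoener's D = 1 - 0.5 * sum(|p1 - p2|). A hedged call sketch; the map names are hypothetical, and v1/v2 are assumed to be the global sums reported by r.univar so that n/v is a proportion:

# v1 and v2 are the global sums of the two maps, so that n1/v1 and
# n2/v2 are proportions summing to 1.
v1 = float(gs.parse_command("r.univar", flags="g", map="suit_a")["sum"])
v2 = float(gs.parse_command("r.univar", flags="g", map="suit_b")["sum"])
label, m1, m2, d_val = D_index("suit_a", "suit_b", v1, v2, txtf=None)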
Example #13
def create_index(driver, database, table, index_name, key):
    if driver == 'dbf':
        return False

    gscript.info(_("Creating index <%s>...") % index_name)
    try:
        gscript.run_command('db.execute', quiet=True, driver=driver,
                            database=database,
                            sql="create unique index %s on %s(%s)" %
                                (index_name, table, key))
    except CalledModuleError:
        gscript.warning(_("Unable to create index <%s>") % index_name)
Example #14
def parse_bgr_input(alias_input, env_maps, alias_names):
    """Parse environmental background input"""
    if env_maps:
        env_maps = env_maps.split(",")

    if alias_names:
        alias_names = alias_names.split(",")

    if alias_input:
        if not os.access(alias_input, os.R_OK):
            gscript.fatal(
                _("The file containing alias names does not exist or is not readable.")
            )
        gscript.info(
            _(
                "Alias and map names are being read from file. Other input regarding environmental parameter(s) will be ignored..."
            )
        )
        with open(alias_input, "r") as alias_txt:
            lines = alias_txt.readlines()
            parameters = []
            alias = []
            for line in lines:
                if "," in line:
                    parameters.append(line.split(",")[1].strip())
                    alias.append(line.split(",")[0].strip())
    else:
        parameters = env_maps

        if alias_names:
            alias = alias_names
        else:
            alias = [param.split("@")[0] for param in parameters]

    # Check if the number of alias names matches the number of background maps
    if len(alias) != len(parameters):
        gscript.fatal(
            _("Number of provided background maps and alias names do not match.")
        )

    for param in parameters:
        # Check if environmental parameter map(s) exist
        if not RasterRow(param).exist():
            gscript.fatal(
                _("Could not find environmental parameter raster map <{}>").format(
                    param
                )
            )

    return alias, parameters
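
The alias file read above is expected to hold one alias,mapname pair per line; lines without a comma are silently skipped. A hypothetical example:

# Contents of a hypothetical 'aliases.csv' (alias in column 0,
# raster map in column 1):
#   bio1,bio1@climate
#   bio12,bio12@climate
alias, parameters = parse_bgr_input("aliases.csv", None, None)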
Example #15
def I_index(n1, v1, n2, v2, txtf):
    """Calculate I (Warren et al. 2008). Note that the sqrt in the
    H formulation and the ^2 in the I formation  cancel each other out,
    hence the formulation below """
    tmpf1 = tmpname("rniche")
    gs.mapcalc("$tmpf1 = (sqrt(double($n1)/$v1) - sqrt(double($n2)/$v2))^2",
               tmpf1=tmpf1, n1=n1, v1=v1, n2=n2, v2=v2, quiet=True)
    NE = float(gs.parse_command("r.univar", quiet=True, flags="g",
                                map=tmpf1)['sum'])
    NEQ = 1 - (0.5 * NE)
    gs.info(_("Niche overlap (I) of {} and {} {}").format(n1.split('@')[0],
            n2.split('@')[0], round(NEQ, 3)))
    return ['Niche overlap (I)', n1, n2, NEQ]
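
With p1 = n1/v1 and p2 = n2/v2 as proportions, this computes I = 1 - 0.5 * sum((sqrt(p1) - sqrt(p2))^2), i.e. Warren's I without a separate Hellinger-distance step, as the docstring notes. Usage mirrors D_index() above (hypothetical names; mind the argument order n1, v1, n2, v2):

# v1 and v2 are again the global sums of the two maps.
label, m1, m2, i_val = I_index("suit_a", v1, "suit_b", v2, txtf=None)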
Example #16
def EB(simlay, reflay):
    """Computation of the envirionmental bias and print to stdout"""
    # Median and mad for whole region (within current mask)
    tmpf4 = tmpname("reb4")
    CLEAN_RAST.append(tmpf4)
    d = gs.read_command("r.quantile",
                        quiet=True,
                        input=simlay,
                        percentiles="50")
    d = d.split(":")
    d = float(d[2].replace("\n", ""))
    gs.mapcalc("$tmpf4 = abs($map - $d)",
               map=simlay,
               tmpf4=tmpf4,
               d=d,
               quiet=True)
    mad = gs.read_command("r.quantile",
                          quiet=True,
                          input=tmpf4,
                          percentiles="50")
    mad = mad.split(":")
    mad = float(mad[2].replace("\n", ""))
    gs.run_command("g.remove",
                   quiet=True,
                   flags="f",
                   type="raster",
                   name=tmpf4)

    # Median and mad for reference layer
    tmpf5 = tmpname("reb5")
    CLEAN_RAST.append(tmpf5)
    gs.mapcalc(
        "$tmpf5 = if($reflay==1, $simlay, null())",
        simlay=simlay,
        tmpf5=tmpf5,
        reflay=reflay,
        quiet=True,
    )
    e = gs.read_command("r.quantile",
                        quiet=True,
                        input=tmpf5,
                        percentiles="50")
    e = e.split(":")
    e = float(e[2].replace("\n", ""))
    EBstat = abs(d - e) / mad

    # Print results to screen and return results
    gs.info(_("Median (all region) = {:.3f}").format(d))
    gs.info(_("Median (ref. area) = {:.3f}").format(e))
    gs.info(_("MAD = {:.3f}").format(mad))
    gs.info(_("EB = {:.3f}").format(EBstat))

    # Clean up and return data
    gs.run_command("g.remove",
                   flags="f",
                   type="raster",
                   name=tmpf5,
                   quiet=True)
    return (mad, d, e, EBstat)
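
The statistic assembled above is EB = |median_all - median_ref| / MAD, where the medians come from r.quantile over the whole region and over the reference area, and MAD is the median absolute deviation of the similarity layer. A minimal call sketch with hypothetical map names (assumes tmpname() and CLEAN_RAST from the surrounding module):

# 'similarity' is a raster of similarity scores; 'refmask' is a
# 1/null raster marking the reference area.
mad, median_all, median_ref, eb = EB("similarity", "refmask")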
Example #17
def print_map_semantic_label(name, label_reader):
    """Print semantic label information assigned to a single raster map

    :param str name: raster map name
    """
    from grass.pygrass.raster import RasterRow

    try:
        with RasterRow(name) as rast:
            semantic_label = rast.info.semantic_label
            if semantic_label:
                label_reader.print_info(semantic_label)
            else:
                gs.info(_("No semantic label assigned to <{}>").format(name))
    except OpenError:
        gs.error(_("Map <{}> not found").format(name))
Example #18
def main():
    remove = options['operation'] == 'remove'
    if remove or flags['f']:
        extensions = gscript.read_command('g.extension', quiet=True,
                                          flags='a').splitlines()
    else:
        extensions = get_extensions()

    if not extensions:
        if remove:
            gscript.info(_("No extension found. Nothing to remove."))
        else:
            gscript.info(
                _("Nothing to rebuild. Rebuilding process can be forced with -f flag."
                  ))
        return 0

    if remove and not flags['f']:
        gscript.message(_("List of extensions to be removed:"))
        print(os.linesep.join(extensions))
        gscript.message(
            _("You must use the force flag (-f) to actually remove them. Exiting."
              ))
        return 0

    for ext in extensions:
        gscript.message('-' * 60)
        if remove:
            gscript.message(_("Removing extension <%s>...") % ext)
        else:
            gscript.message(_("Reinstalling extension <%s>...") % ext)
        gscript.message('-' * 60)
        if remove:
            operation = 'remove'
            operation_flags = 'f'
        else:
            operation = 'add'
            operation_flags = ''
        try:
            gscript.run_command('g.extension',
                                flags=operation_flags,
                                extension=ext,
                                operation=operation)
        except CalledModuleError:
            gscript.error(_("Unable to process extension:%s") % ext)

    return 0
Example #19
def main():
    remove = options["operation"] == "remove"
    if remove or flags["f"]:
        extensions = gscript.read_command("g.extension", quiet=True,
                                          flags="a").splitlines()
    else:
        extensions = get_extensions()

    if not extensions:
        if remove:
            gscript.info(_("No extension found. Nothing to remove."))
        else:
            gscript.info(
                _("Nothing to rebuild. Rebuilding process can be forced with -f flag."
                  ))
        return 0

    if remove and not flags["f"]:
        gscript.message(_("List of extensions to be removed:"))
        print(os.linesep.join(extensions))
        gscript.message(
            _("You must use the force flag (-f) to actually remove them. Exiting."
              ))
        return 0

    for ext in find_addon_name(addons=extensions):
        gscript.message("-" * 60)
        if remove:
            gscript.message(_("Removing extension <%s>...") % ext)
        else:
            gscript.message(_("Reinstalling extension <%s>...") % ext)
        gscript.message("-" * 60)
        if remove:
            operation = "remove"
            operation_flags = "f"
        else:
            operation = "add"
            operation_flags = ""
        try:
            gscript.run_command("g.extension",
                                flags=operation_flags,
                                extension=ext,
                                operation=operation)
        except CalledModuleError:
            gscript.error(_("Unable to process extension:%s") % ext)

    return 0
Example #20
def print_map_band_reference(name, band_reader):
    """Print band reference information assigned to a single raster map

    :param str name: raster map name
    """
    from grass.pygrass.raster import RasterRow

    try:
        with RasterRow(name) as rast:
            band_ref = rast.info.band_reference
            if band_ref:
                shortcut, band = band_ref.split('_')
                band_reader.print_info(shortcut, band)
            else:
                gs.info(_("No band reference assigned to <{}>").format(name))
    except OpenError:
        gs.error(_("Map <{}> not found").format(name))
Example #21
def copy_tab(from_driver, from_database, from_table,
             to_driver, to_database, to_table):
    if to_table in grass.read_command('db.tables', quiet = True,
                                      driver = to_driver,
                                      database = to_database,
                                      stderr = nuldev).splitlines():
        return False
    
    grass.info("Copying table <%s> to target database..." % to_table)
    if 0 != grass.run_command('db.copy', from_driver = from_driver,
                              from_database = from_database,
                              from_table = from_table, to_driver = to_driver,
                              to_database = to_database,
                              to_table = to_table):
        grass.fatal(_("Unable to copy table <%s>") % from_table)
    
    return True
Example #22
def main():
    remove = options['operation'] == 'remove'
    if remove or flags['f']:
        extensions = gscript.read_command(
            'g.extension',
            quiet=True,
            flags='a').splitlines()
    else:
        extensions = get_extensions()

    if not extensions:
        if remove:
            gscript.info(_("No extension found. Nothing to remove."))
        else:
            gscript.info(
                _("Nothing to rebuild. Rebuilding process can be forced with -f flag."))
        return 0

    if remove and not flags['f']:
        gscript.message(_("List of extensions to be removed:"))
        print(os.linesep.join(extensions))
        gscript.message(
            _("You must use the force flag (-f) to actually remove them. Exiting."))
        return 0

    for ext in extensions:
        gscript.message('-' * 60)
        if remove:
            gscript.message(_("Removing extension <%s>...") % ext)
        else:
            gscript.message(_("Reinstalling extension <%s>...") % ext)
        gscript.message('-' * 60)
        if remove:
            operation = 'remove'
            operation_flags = 'f'
        else:
            operation = 'add'
            operation_flags = ''
        try:
            gscript.run_command('g.extension', flags=operation_flags,
                                extension=ext, operation=operation)
        except CalledModuleError:
            gscript.error(_("Unable to process extension:%s") % ext)

    return 0
Example #23
def copy_tab(from_driver, from_database, from_table,
             to_driver, to_database, to_table):
    if to_table in gscript.read_command('db.tables', quiet=True,
                                        driver=to_driver,
                                        database=to_database,
                                        stderr=nuldev).splitlines():
        return False

    gscript.info("Copying table <%s> to target database..." % to_table)
    try:
        gscript.run_command('db.copy', from_driver=from_driver,
                            from_database=from_database,
                            from_table=from_table, to_driver=to_driver,
                            to_database=to_database,
                            to_table=to_table)
    except CalledModuleError:
        gscript.fatal(_("Unable to copy table <%s>") % from_table)

    return True
Example #24
def copy_tab(from_driver, from_database, from_table,
             to_driver, to_database, to_table):
    if to_table in grass.read_command('db.tables', quiet = True,
                                      driver = to_driver,
                                      database = to_database,
                                      stderr = nuldev).splitlines():
        return False
    
    grass.info("Copying table <%s> to target database..." % to_table)
    try:
        grass.run_command('db.copy', from_driver = from_driver,
                          from_database = from_database,
                          from_table = from_table, to_driver = to_driver,
                          to_database = to_database,
                          to_table = to_table)
    except CalledModuleError:
        grass.fatal(_("Unable to copy table <%s>") % from_table)
    
    return True
Example #25
def main():
    if flags['f']:
        extensions = grass.read_command('g.extension.py',
                                        quiet = True, flags = 'a').splitlines()
    else:
        extensions = get_extensions()
    
    if not extensions:
        grass.info(_("Nothing to rebuild."))
        return 0
    
    for ext in extensions:
        grass.message('-' * 60)
        grass.message(_("Reinstalling extension <%s>...") % ext)
        grass.message('-' * 60)
        grass.run_command('g.extension.py',
                          extension = ext)
    
    return 0
Example #26
def main():

    # Get the options
    name = options["dataset"]
    type = options["type"]
    temporaltype = options["temporaltype"]
    title = options["title"]
    descr = options["description"]
    semantic = options["semantictype"]
    gran = options["granularity"]

    # Make sure the temporal database exists
    tgis.create_temporal_database()

    # Get the current mapset to create the id of the space time dataset
    mapset = grass.gisenv()["MAPSET"]
    id = name + "@" + mapset

    if type == "strds":
        sp = tgis.space_time_raster_dataset(id)
    if type == "str3ds":
        sp = tgis.space_time_raster3d_dataset(id)
    if type == "stvds":
        sp = tgis.space_time_vector_dataset(id)

    dbif = tgis.sql_database_interface()
    dbif.connect()

    if sp.is_in_db(dbif) and not grass.overwrite():
        grass.fatal("Space time " + sp.get_new_map_instance(None).get_type() +
                    " dataset <" + name + "> is already in the database. "
                    "Use the overwrite flag.")

    if sp.is_in_db(dbif) and grass.overwrite():
        grass.info("Overwriting space time " +
                   sp.get_new_map_instance(None).get_type() +
                   " dataset <" + name + "> and unregistering all maps.")
        sp.delete(dbif)
        sp = sp.get_new_instance(id)

    grass.info("Creating space time " +
               sp.get_new_map_instance(None).get_type() + " dataset.")

    sp.set_initial_values(granularity=gran, temporal_type=temporaltype,
                          semantic_type=semantic, title=title,
                          description=descr)
    sp.insert(dbif)

    dbif.close()
Example #27
def main():
    soillossbare = options['soillossbare']
    cpmax = options['cpmax']
    maxsoilloss = options['maxsoilloss']
    
    r.mapcalc(cpmax + "=" + maxsoilloss + "/" + soillossbare)
    
    cpmaxrules = '\n '.join([
        "0.00  56:145:37",
        "0.01  128:190:91",
        "0.05  210:233:153",
        "0.1  250:203:147",
        "0.15  225:113:76",
        "0.2  186:20:20",
        "100  0:0:0"
    ])

    r.colors(map = cpmax, rules = '-', stdin = cpmaxrules)
    
    gscript.info('Calculation of CPmax-scenario in <%s> finished.' % cpmax)
Example #28
def main():
    user = password = None
    if options['datasource'] == 'ESA_COAH' or options['datasource'] == 'GCS':
        api_url = 'https://scihub.copernicus.eu/apihub'
    else:
        api_url = 'USGS_EE'
    if options['datasource'] == 'GCS' and (options['producttype'] not in
                                           ['S2MSI2A', 'S2MSI1C']):
        gs.fatal(_("Download from GCS only supports producttypes S2MSI2A "
                   "or S2MSI1C"))

    if options['settings'] == '-':
        # stdin
        import getpass
        user = raw_input(_('Insert username: '))
        password = getpass.getpass(_('Insert password: '))
        url = raw_input(_('Insert API URL (leave empty for {}): ').format(api_url))
        if url:
            api_url = url
    else:
        try:
            with open(options['settings'], 'r') as fd:
                lines = list(filter(None, (line.rstrip() for line in fd))) # non-blank lines only
                if len(lines) < 2:
                    gs.fatal(_("Invalid settings file"))
                user = lines[0].strip()
                password = lines[1].strip()
                if len(lines) > 2:
                    api_url = lines[2].strip()
        except IOError as e:
            gs.fatal(_("Unable to open settings file: {}").format(e))

    if user is None or password is None:
        gs.fatal(_("No user or password given"))

    if flags['b']:
        map_box = get_aoi(options['map'])
    else:
        map_box = get_aoi_box(options['map'])

    sortby = options['sort'].split(',')
    if options['producttype'] in ('SLC', 'GRD', 'OCN'):
        if options['clouds']:
            gs.info(_("Option <{}> ignored: cloud cover percentage "
                    "is not defined for product type {}").format(
                        "clouds", options['producttype']))
            options['clouds'] = None
        try:
            sortby.remove('cloudcoverpercentage')
        except ValueError:
            pass
    try:
        downloader = SentinelDownloader(user, password, api_url)

        if options['uuid']:
            downloader.set_uuid(options['uuid'].split(','))
        else:
            query = {}
            if options['query']:
                for item in options['query'].split(','):
                    k, v = item.split('=')
                    query[k] = v
            filter_args = {
                'area': map_box,
                'area_relation': options['area_relation'],
                'clouds': options['clouds'],
                'producttype': options['producttype'],
                'limit': options['limit'],
                'query': query,
                'start': options['start'],
                'end': options['end'],
                'sortby': sortby,
                'asc': options['order'] == 'asc',
                'relativeorbitnumber': options['relativeorbitnumber']
            }
            if options['datasource'] == 'ESA_COAH' or options['datasource'] == 'GCS':
                downloader.filter(**filter_args)
            elif options['datasource'] == 'USGS_EE':
                downloader.filter_USGS(**filter_args)
    except Exception as e:
        gs.fatal(_("Unable to connect to {0}: {1}").format(
            options['datasource'], e))

    if options['footprints']:
        downloader.save_footprints(options['footprints'])

    if flags['l']:
        downloader.list()
        return

    downloader.download(options['output'], options['sleep'],
                        int(options['retry']), options['datasource'])

    return 0
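
The settings file parsed in the else branch above is plain text with one value per line: username, password, and an optional third line overriding the API URL. A hypothetical example:

# Hypothetical settings file passed via options['settings']:
#   myuser
#   mypassword
#   https://scihub.copernicus.eu/apihub    (optional third line)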
Example #29
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': [
                '1 x 1 degree',
                '15 x 15 minute'
            ],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011': (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness',
                'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)},
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "lidar": {
            'product': 'Lidar Point Cloud (LPC)',
            'dataset': {
                'Lidar Point Cloud (LPC)': (1. / 3600 / 9, 3, 10)},
            'subset': {},
            'extent': [''],
            'format': 'LAS,LAZ',
            'extension': 'las,laz',
            'zip': True,
            'srs': '',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extensions = tuple(nav_string['extension'].split(','))
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    has_pdal = gscript.find_program(pgm='v.in.pdal')
    if gui_product == 'lidar':
        gui_dataset = 'Lidar Point Cloud (LPC)'
        product_tag = nav_string['product']
        if not has_pdal:
            gscript.warning(_("Module v.in.pdal is missing,"
                              " any downloaded data will not be processed."))
    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']
    memory = options['memory']
    nprocs = options['nprocs']

    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not os.path.isdir(work_dir):
        gscript.fatal(_("Directory <{}> does not exist."
                        " Please create it.").format(work_dir))

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == 'lidar' and options['resolution']:
        product_resolution = float(options['resolution'])

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(_("The default resampling method for product {product} is {res}").format(product=gui_product,
                        res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    min_coords = gscript.read_command('m.proj', coordinates=(gregion['w'], gregion['s']),
                                      proj_out=wgs84, separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj', coordinates=(gregion['e'], gregion['n']),
                                      proj_out=wgs84, separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_messge = _("Possibly, the query has timed out. Check network configuration and try again.")
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(_(
            "HTTP(S) error from USGS TNM API:"
            " {code}: {reason} ({instructions})").format(
                reason=error.reason, code=error.code, instructions=try_again_messge))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(_(
            "Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_messge))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == 'lidar' and options['title_filter']:
            return_JSON['items'] = [item for item in return_JSON['items'] if options['title_filter'] in item['title']]
            return_JSON['total'] = len(return_JSON['items'])

    except Exception:
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size - TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(_("TNM API ERROR or Zero tiles available for given input parameters."))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: simply continue with whatever is needed to be done in this case
    if cleanup_list:
        cleanup_msg = _("\n{0} existing incomplete file(s) detected and removed. Run module again.").format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # formats JSON size from bytes into needed units for combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if 6 < len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        if len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(_("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(_("USGS download request has timed out. Network or formatting error."))
        except Exception:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported;
    # pre-computed extraction stats are unreliable, so stats are collected
    # during the extraction itself
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, extract it again
                                    if os.path.getmtime(extracted_tile) < os.path.getmtime(z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(_(
                    "Unable to locate or extract IMG file '{filename}'"
                    " from ZIP archive '{zipname}': {error}").format(
                        filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(_("Extracted {extracted} new tiles and"
                          " used {used} existing tiles").format(
            used=used_existing_extracted_tiles_num,
            extracted=extracted_tiles_num
        ))
        if old_extracted_tiles_num:
            gscript.verbose(_("Found {removed} existing tiles older"
                              " than the corresponding downloaded archive").format(
                            removed=old_extracted_tiles_num
                            ))
        if removed_extracted_tiles_num:
            gscript.verbose(_("Removed {removed} existing tiles").format(
                            removed=removed_extracted_tiles_num
                            ))

    if gui_product == 'lidar' and not has_pdal:
        gscript.fatal(_("Module v.in.pdal is missing,"
                        " cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is the naming of the maps (it is a smaller
    # issue for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists("raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _("Importing and reprojecting {name}"
                            " ({count} out of {total})...").format(
                                name=LT_file_name, count=i + 1, total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != 'lidar':
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_file_import, kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            resolution='value', resolution_value=product_resolution,
                            extent="region", resample=product_interpolation,
                            memory=memory
                        ))
                else:
                    srs = options['input_srs']
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_lidar_import, kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            input_srs=srs if srs else None
                        ))
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(_("Parallel import and reprojection failed."
                                        " Try running with nprocs=1."))
                    else:
                        gscript.fatal(_("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count

    gscript.verbose(_("Imported {imported} new tiles and"
                      " used {used} existing tiles").format(
        used=used_existing_imported_tiles_num,
        imported=imported_tiles_num
    ))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
    rst_params = dict(tension=25, smooth=0.1, npmin=100)

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [name + '.' + i for name in patch_names]
                        output = gui_output_layer + '.' + i
                        gscript.run_command('r.patch', input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == 'lidar':
                    gscript.run_command('v.patch', flags='nzb', input=patch_names,
                                        output=gui_output_layer)
                    gscript.run_command('v.surf.rst', input=gui_output_layer,
                                        elevation=gui_output_layer, nprocs=nprocs,
                                        **rst_params)
                else:
                    gscript.run_command('r.patch', input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added").format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove imported tiles unless they should be preserved (-k flag)
                if not preserve_imported_tiles:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [name + '.' + i for name in patch_names]
                            gscript.run_command('g.remove', type='raster',
                                                name=patch_names_i, flags='f')
                    elif gui_product == 'lidar':
                        gscript.run_command('g.remove', type='vector',
                                            name=patch_names + [gui_output_layer], flags='f')
                    else:
                        gscript.run_command('g.remove', type='raster',
                                            name=patch_names, flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename', raster=(patch_names[0] + '.' + i, gui_output_layer + '.' + i))
            elif gui_product == 'lidar':
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                gscript.run_command('v.surf.rst', input=patch_names[0],
                                    elevation=gui_output_layer, nprocs=nprocs,
                                    **rst_params)
                if not preserve_imported_tiles:
                    gscript.run_command('g.remove', type='vector',
                                        name=patch_names[0], flags='f')
            else:
                gscript.run_command('g.rename', raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(_("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(_(
            "Error in getting or importing the data (see above). Please retry."))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = ("<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors', map=gui_output_layer, color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite', red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2', blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
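The import loop earlier in this example fans tiles out to worker processes, joins them in batches of nprocs, and sums the exit codes to detect failures. A minimal, self-contained sketch of that join/exit-code pattern (the worker function and tile list here are hypothetical, not part of the module):

import multiprocessing

def import_tile(identifier, results, path):
    # hypothetical worker: record one result per tile
    results[identifier] = {"output": path}

if __name__ == "__main__":
    manager = multiprocessing.Manager()
    results = manager.dict()                # shared across processes
    tiles = ["tile_a", "tile_b", "tile_c"]  # hypothetical inputs
    nprocs = 2
    process_list = []
    for i, tile in enumerate(tiles):
        process = multiprocessing.Process(
            target=import_tile, args=(i, results, tile))
        process.start()
        process_list.append(process)
        # join in batches of nprocs, or when the last tile is reached
        if len(process_list) == nprocs or i == len(tiles) - 1:
            exitcodes = 0
            for process in process_list:
                process.join()
                exitcodes += process.exitcode
            if exitcodes != 0:
                raise RuntimeError("at least one worker failed")
            process_list = []
    print(dict(results))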
Ejemplo n.º 30
0
def extendLine(map, map_out, maxlen=200, scale=0.5, debug=False, verbose=1):
    #
    # map=Input map name
    # map_out=Output map with extensions
    # maxlen=Max length in map units that line can be extended (def=200)
    # scale=Maximum length of extension as proportion of original line, disabled if 0 (def=0.5)
    # vlen=number of vertices to look back in calculating line end direction (def=1)
    # Not sure if it is worth putting this in as parameter.
    #
    allowOverwrite = os.getenv('GRASS_OVERWRITE', '0') == '1'
    grass.info("map={}, map_out={}, maxlen={}, scale={}, debug={}".format(
        map, map_out, maxlen, scale, debug))
    vlen = 1  # not sure if this is worth putting in as parameter
    cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'parent', 'INTEGER'),
            (u'dend', 'TEXT'), (u'orgx', 'DOUBLE PRECISION'),
            (u'orgy', 'DOUBLE PRECISION'), (u'search_len', 'DOUBLE PRECISION'),
            (u'search_az', 'DOUBLE PRECISION'), (u'best_xid', 'INTEGER'),
            (u'near_x', 'DOUBLE PRECISION'), (u'near_y', 'DOUBLE PRECISION'),
            (u'other_cat', 'INTEGER'), (u'xtype', 'TEXT'),
            (u'x_len', 'DOUBLE PRECISION')]
    extend = VectorTopo('extend')
    if extend.exist():
        extend.remove()
    extend.open('w', tab_name='extend', tab_cols=cols)
    #
    # Go through input map, looking at each line and its two nodes to find nodes
    # with only a single line starting/ending there - i.e. a dangle.
    # For each found, generate an extension line in the new map "extend"
    #
    inMap = VectorTopo(map)
    inMap.open('r')
    dangleCnt = 0
    tickLen = len(inMap)
    grass.info("Searching {} features for dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")
    for ln in inMap:
        ticker = (ticker + 1)
        grass.percent(ticker, tickLen, 5)
        if ln.gtype == 2:  # Only process lines
            for nd in ln.nodes():
                if nd.nlines == 1:  # We have a dangle
                    dangleCnt = dangleCnt + 1
                    vtx = min(len(ln) - 1, vlen)
                    if len([1 for _ in nd.lines(only_out=True)
                            ]) == 1:  # Dangle starting at node
                        dend = "head"
                        sx = ln[0].x
                        sy = ln[0].y
                        dx = sx - ln[vtx].x
                        dy = sy - ln[vtx].y
                    else:  # Dangle ending at node
                        dend = "tail"
                        sx = ln[-1].x
                        sy = ln[-1].y
                        dx = sx - ln[-(vtx + 1)].x
                        dy = sy - ln[-(vtx + 1)].y
                    endaz = math.atan2(dy, dx)
                    if scale > 0:
                        extLen = min(ln.length() * scale, maxlen)
                    else:
                        extLen = maxlen
                    ex = extLen * math.cos(endaz) + sx
                    ey = extLen * math.sin(endaz) + sy
                    extLine = geo.Line([(sx, sy), (ex, ey)])
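                    # the attribute tuple passed to write() appears to fill, in
                    # table order: parent (the line's cat), dend, orgx, orgy,
                    # search_len, search_az, best_xid, near_x, near_y,
                    # other_cat, xtype, x_len; cat itself is auto-assigned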
                    quiet = extend.write(extLine,
                                         (ln.cat, dend, sx, sy, extLen, endaz,
                                          0, 0, 0, 0, 'null', extLen))

    grass.info(
        "{} dangle nodes found, committing table extend".format(dangleCnt))
    extend.table.conn.commit()
    extend.close(build=True, release=True)
    inMap.close()

    #
    # Create two tables where extensions intersect;
    # 1. intersect with original lines
    # 2. intersect with self - to extract intersects between extensions
    #
    # First the intersects with original lines
    grass.info(
        "Searching for intersects between potential extensions and original lines"
    )
    table_isectIn = Table('isectIn',
                          connection=sqlite3.connect(get_path(path)))
    if table_isectIn.exist():
        table_isectIn.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to=map,
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectIn")
    # Each extension will have touched the dangle it comes from, so remove those touches
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn INNER JOIN extend ON from_cat=cat WHERE near_cat=parent)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectIn ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectIn SET ntype = 'orig' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Now second self intersect table
    #
    grass.info("Searching for intersects of potential extensions")
    table_isectX = Table('isectX', connection=sqlite3.connect(get_path(path)))
    if table_isectX.exist():
        table_isectX.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to="extend",
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectX")
    # Obviously every extension intersects itself, so remove those "intersects"
    run_command("db.execute",
                sql="DELETE FROM isectX WHERE from_cat = near_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectX ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectX SET ntype = 'ext' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Combine the two tables and add a few more attributes
    #
    run_command("db.execute",
                sql="INSERT INTO isectIn SELECT * FROM isectX",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    cols_isectIn = Columns('isectIn',
                           connection=sqlite3.connect(get_path(path)))
    cols_isectIn.add(['from_x'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['from_y'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['ext_len'], ['DOUBLE PRECISION'])
    # Get starting coordinate at the end of the dangle
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_x = (SELECT extend.orgx FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_y = (SELECT extend.orgy FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    # For each intersect point, calculate the distance along the extension line
    # from the end of the dangle. It would be nicer to do this in the database,
    # but SQLite doesn't support sqrt or exponents.
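    # (Alternative sketch: sqlite3 can register Python functions on a
    # connection, e.g. table_isectIn.conn.create_function("sqrt", 1, math.sqrt),
    # which would allow this update in a single SQL statement; the row-by-row
    # loop below follows the original approach instead.)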
    grass.info(
        "Calculating distances of intersects along potential extensions")
    cur = table_isectIn.execute(
        sql_code="SELECT rowid, from_x, from_y, nx, ny FROM isectIn")
    for row in cur.fetchall():
        rowid, fx, fy, nx, ny = row
        x_len = math.sqrt((fx - nx)**2 + (fy - ny)**2)
        sqlStr = "UPDATE isectIn SET ext_len={:.8f} WHERE rowid={:d}".format(
            x_len, rowid)
        table_isectIn.execute(sql_code=sqlStr)
    grass.verbose("Ready to commit isectIn changes")
    table_isectIn.conn.commit()
    # Remove any intersects at zero distance from the end of their dangle.
    # These occur when another extension intersects exactly at that point.
    run_command("db.execute",
                sql="DELETE FROM isectIn WHERE ext_len = 0.0",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()

    # Go through the extensions and find the intersect closest to each origin.
    grass.info("Searching for closest intersect for each potential extension")

    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN bst INTEGER"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nrx DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nry DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN ocat TEXT"
    #    run_command("db.execute",
    #                sql = "INSERT OR REPLACE INTO extend_t1 (bst, nrx, nry, ocat) VALUES ((SELECT isectIn.rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=extend_t1.cat ORDER BY ext_len ASC LIMIT 1))",
    #               driver = "sqlite",
    #               database = "$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    grass.verbose("CREATE index")
    run_command("db.execute",
                sql="CREATE INDEX idx_from_cat ON isectIn (from_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE best_xid")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET best_xid = (SELECT isectIn.rowid FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE x_len")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET x_len = (SELECT ext_len FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_x")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_x = (SELECT nx FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_y")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_y = (SELECT ny FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE other_cat")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET other_cat = (SELECT near_cat FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE xtype")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET xtype = (SELECT ntype FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("DROP index")
    run_command("db.execute",
                sql="DROP INDEX idx_from_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("CREATE index on near_cat")
    run_command("db.execute",
                sql="CREATE INDEX idx_near_cat ON isectIn (near_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    quiet = table_isectIn.filters.select('rowid', 'ext_len', 'nx', 'ny',
                                         'near_cat', 'ntype')
    #    quiet=table_isectIn.filters.order_by(['ext_len ASC'])
    quiet = table_isectIn.filters.order_by('ext_len ASC')
    quiet = table_isectIn.filters.limit(1)
    table_extend = Table('extend', connection=sqlite3.connect(get_path(path)))

    # Code below was replaced by the commands above until the memory problem can be sorted out
    #    table_extend.filters.select('cat')
    #    cur=table_extend.execute()
    #    updateCnt = 0
    #    for row in cur.fetchall():
    #        cat, = row
    #        quiet=table_isectIn.filters.where('from_cat={:d}'.format(cat))

    ##SELECT rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=32734 ORDER BY ext_len ASC LIMIT 1

    #        x_sect=table_isectIn.execute().fetchone()
    #        if x_sect is not None:
    #            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
    #            sqlStr="UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
    #            table_extend.execute(sql_code=sqlStr)
    ## Try periodic commit to avoid crash!
    #            updateCnt = (updateCnt + 1) % 10000
    #            if updateCnt == 0:
    #              table_extend.conn.commit()
    grass.verbose("Ready to commit extend changes")
    table_extend.conn.commit()
    #
    # There may be extensions that crossed, where the intersection was chosen by
    # one but not reciprocated by the other.
    # Need to remove those possibilities and allow the jilted extension to re-search.
    #
    grass.verbose("Deleting intersects already resolved")
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn JOIN extend ON near_cat=cat WHERE ntype='ext' AND xtype!='null')",  #"AND from_cat!=other_cat" no second chance!
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    grass.verbose("Deleting complete")

    # To find the jilted - need a copy of extensions that have found an
    # intersection (won't overwrite so drop first)
    grass.verbose(
        "Re-searching for mis-matched intersects between potential extensions")
    table_imatch = Table('imatch', connection=sqlite3.connect(get_path(path)))
    if table_imatch.exist():
        table_imatch.drop(force=True)
    wvar = "xtype!='null'"
    run_command(
        "db.copy",
        overwrite=True,
        quiet=True,
        from_driver="sqlite",
        from_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        from_table="extend",
        to_driver="sqlite",
        to_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        to_table="imatch",
        where=wvar)
    # Memory problems?
    if gc.isenabled():
        grass.verbose("Garbage collection enabled - forcing gc cycle")
        gc.collect()
    else:
        grass.verbose("Garbage collection not enabled")
    # Ensure tables are committed
    table_extend.conn.commit()
    table_imatch.conn.commit()
    table_isectIn.conn.commit()
    # Identify the jilted: extensions whose chosen partner picked a different extension
    sqlStr = "SELECT extend.cat FROM extend JOIN imatch ON extend.other_cat=imatch.cat WHERE extend.xtype='ext' and extend.cat!=imatch.other_cat"
    cur = table_extend.execute(sql_code=sqlStr)
    updateCnt = 0
    for row in cur.fetchall():
        cat, = row
        grass.verbose("Reworking extend.cat={}".format(cat))
        quiet = table_isectIn.filters.where('from_cat={:d}'.format(cat))
        #print("SQL: {}".format(table_isectIn.filters.get_sql()))
        x_sect = table_isectIn.execute().fetchone()  # Problem here under modules
        if x_sect is None:
            sqlStr = "UPDATE extend SET best_xid=0, x_len=search_len, near_x=0, near_y=0, other_cat=0, xtype='null' WHERE cat={:d}".format(
                cat)
        else:
            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
            sqlStr = "UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(
                x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
        table_extend.execute(sql_code=sqlStr)
        ## Try periodic commit to avoid crash!
        updateCnt = (updateCnt + 1) % 100
        if (updateCnt == 0):  # or (cat == 750483):
            grass.verbose(
                "Committing periodic updates to table extend")
            table_extend.conn.commit()

    grass.verbose("Committing adjustments to table extend")
    table_extend.conn.commit()
    #
    # For debugging, create a map with the chosen intersect points
    #
    if debug:
        wvar = "xtype!='null' AND x_len!=0"
        #        print(wvar)
        run_command(
            "v.in.db",
            overwrite=True,
            quiet=True,
            table="extend",
            driver="sqlite",
            database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
            x="near_x",
            y="near_y",
            key="cat",
            where=wvar,
            output="chosen")
    #
    # Finally adjust the dangle lines in the input map - use a copy (map_out) if requested
    #
    if map_out:
        run_command("g.copy",
                    overwrite=allowOverwrite,
                    quiet=True,
                    vector=map + "," + map_out)
    else:  # Otherwise just modify the original dataset (map)
        if allowOverwrite:
            grass.warning("Modifying vector map ({})".format(map))
            map_out = map
        else:
            grass.error(
                "Use the --o switch to modify the input vector map ({})".format(
                    map))
            return 1
    #
    # Get info for lines that need extending
    table_extend.filters.select(
        'parent, dend, near_x, near_y, search_az, xtype')
    table_extend.filters.where("xtype!='null'")
    extLines = table_extend.execute().fetchall()
    cat_mods = [ext[0] for ext in extLines]
    tickLen = len(cat_mods)
    grass.info("Extending {} dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")

    # Open up the map_out copy (or the original) and work through looking for lines that need modifying
    inMap = VectorTopo(map_out)
    inMap.open('rw', tab_name=map_out)

    for ln_idx in range(len(inMap)):
        ln = inMap.read(ln_idx + 1)
        if ln.gtype == 2:  # Only process lines
            while ln.cat in cat_mods:  # Note: could be 'head' and 'tail'
                ticker = (ticker + 1)
                grass.percent(ticker, tickLen, 5)
                cat_idx = cat_mods.index(ln.cat)
                cat, dend, nx, ny, endaz, xtype = extLines.pop(cat_idx)
                dump = cat_mods.pop(cat_idx)
                if xtype == 'orig':  # Overshoot by 0.1 as breaking lines is unreliable
                    nx = nx + 0.1 * math.cos(endaz)
                    ny = ny + 0.1 * math.sin(endaz)
                newEnd = geo.Point(x=nx, y=ny, z=None)
                if dend == 'head':
                    ln.insert(0, newEnd)
                else:  # 'tail'
                    ln.append(newEnd)
                quiet = inMap.rewrite(ln_idx + 1, ln)
        else:
            quiet = inMap.delete(ln_idx + 1)


        ## Try periodic commit and garbage collection to avoid crash!
        if (ln_idx % 1000) == 0:
            #           inMap.table.conn.commit()  - no such thing - Why??
            if gc.isenabled():
                quiet = gc.collect()

    inMap.close(build=True, release=True)
    grass.message("v.extendlines completing")
    #
    # Clean up temporary tables and maps
    #
    if not debug:
        table_isectIn.drop(force=True)
        table_isectX.drop(force=True)
        table_imatch.drop(force=True)
        extend.remove()
        chosen = VectorTopo('chosen')
        if chosen.exist():
            chosen.remove()
    return 0
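The dangle-extension geometry in extendLine above reduces to basic trigonometry: the direction at a line end is the atan2 of the last segment, and the extension endpoint is that azimuth projected out by the allowed length. A self-contained check of the endpoint math (coordinates hypothetical):

import math

def extension_endpoint(sx, sy, bx, by, line_len, maxlen=200, scale=0.5):
    """Endpoint of a dangle extension starting at (sx, sy), where
    (bx, by) is the vertex one step back along the line."""
    endaz = math.atan2(sy - by, sx - bx)
    ext_len = min(line_len * scale, maxlen) if scale > 0 else maxlen
    return (ext_len * math.cos(endaz) + sx,
            ext_len * math.sin(endaz) + sy)

# a horizontal dangle ending at (10, 0) extends further along +x
print(extension_endpoint(10.0, 0.0, 9.0, 0.0, line_len=10.0))  # (15.0, 0.0)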
Ejemplo n.º 31
0
def import_stds(input, output, directory, title=None, descr=None, location=None,
                link=False, exp=False, overr=False, create=False,
                stds_type="strds", base=None, set_current_region=False):
    """Import space time datasets of type raster and vector

        :param input: Name of the input archive file
        :param output: The name of the output space time dataset
        :param directory: The extraction directory
        :param title: The title of the newly created space time dataset
        :param descr: The description of the newly created
                     space time dataset
        :param location: The name of the location that should be created,
                        maps are imported into this location
        :param link: Switch to link raster maps instead of importing them
        :param exp: Extend location extents based on new dataset
        :param overr: Override projection (use location's projection)
        :param create: Create the location specified by the "location"
                      parameter and exit.
                      Do not import the space time datasets.
        :param stds_type: The type of the space time dataset that
                         should be imported
        :param base: The base name of the new imported maps, it will be
                     extended using a numerical index.
    """

    global raise_on_error
    old_state = gscript.raise_on_error
    gscript.set_raise_on_error(True)

    # Check if input file and extraction directory exist
    if not os.path.exists(input):
        gscript.fatal(_("Space time raster dataset archive <%s> not found")
                      % input)
    if not create and not os.path.exists(directory):
        gscript.fatal(_("Extraction directory <%s> not found") % directory)

    tar = tarfile.open(name=input, mode='r')

    # Check for important files
    msgr = get_tgis_message_interface()
    msgr.message(_("Checking validity of input file (size: %0.1f MB). Make take a while..."
        % (os.path.getsize(input)/(1024*1024.0))))
    members = tar.getnames()
    # Make sure that the basenames of the files are used for comparison
    member_basenames = [os.path.basename(name) for name in members]

    if init_file_name not in member_basenames:
        gscript.fatal(_("Unable to find init file <%s>") % init_file_name)
    if list_file_name not in member_basenames:
        gscript.fatal(_("Unable to find list file <%s>") % list_file_name)
    if proj_file_name not in member_basenames:
        gscript.fatal(_("Unable to find projection file <%s>") % proj_file_name)

    msgr.message(_("Extracting data..."))
    tar.extractall(path=directory)
    tar.close()

    # We use a new list file name for map registration
    new_list_file_name = list_file_name + "_new"
    # Save current working directory path
    old_cwd = os.getcwd()

    # Switch into the data directory
    os.chdir(directory)

    # Check projection information
    if not location:
        temp_name = gscript.tempfile()
        temp_file = open(temp_name, "w")
        proj_name = os.path.abspath(proj_file_name)

        # We need to convert projection strings generated
        # by programs other than g.proj into
        # newline format so that the GRASS file comparison function
        # can be used to compare the projections
        proj_name_tmp = temp_name + "_in_projection"
        proj_file = open(proj_name, "r")
        proj_content = proj_file.read()
        proj_content = proj_content.replace(" +", "\n+")
        proj_content = proj_content.replace("\t+", "\n+")
        proj_file.close()

        proj_file = open(proj_name_tmp, "w")
        proj_file.write(proj_content)
        proj_file.close()
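        # e.g. a single-line "+proj=longlat +datum=NAD83 +no_defs" becomes
        # one "+key[=value]" token per line, which the key/value file
        # comparison below can parse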

        p = gscript.start_command("g.proj", flags="j", stdout=temp_file)
        p.communicate()
        temp_file.close()

        if not gscript.compare_key_value_text_files(temp_name, proj_name_tmp,
                                                    sep="="):
            if overr:
                gscript.warning(_("Projection information does not match. "
                                  "Proceeding..."))
            else:
                diff = ''.join(gscript.diff_files(temp_name, proj_name))
                gscript.warning(_("Difference between PROJ_INFO file of "
                                  "imported map and of current location:"
                                  "\n{diff}").format(diff=diff))
                gscript.fatal(_("Projection information does not match. "
                                "Aborting."))

    # Create a new location based on the projection information and switch
    # into it
    old_env = gscript.gisenv()
    if location:
        try:
            proj4_string = open(proj_file_name, 'r').read()
            gscript.create_location(dbase=old_env["GISDBASE"],
                                    location=location,
                                    proj4=proj4_string)
            # Just create a new location and return
            if create:
                os.chdir(old_cwd)
                return
        except Exception as e:
            gscript.fatal(_("Unable to create location %(l)s. Reason: %(e)s")
                          % {'l': location, 'e': str(e)})
        # Switch to the newly created location
        try:
            gscript.run_command("g.mapset", mapset="PERMANENT",
                                location=location,
                                dbase=old_env["GISDBASE"])
        except CalledModuleError:
            gscript.fatal(_("Unable to switch to location %s") % location)
        # create default database connection
        try:
            gscript.run_command("t.connect", flags="d")
        except CalledModuleError:
            gscript.fatal(_("Unable to create default temporal database "
                            "in new location %s") % location)

    try:
        # Make sure the temporal database exists
        factory.init()

        fs = "|"
        maplist = []
        mapset = get_current_mapset()
        list_file = open(list_file_name, "r")
        new_list_file = open(new_list_file_name, "w")

        # get number of lines to correctly form the suffix
        max_count = -1
        for max_count, l in enumerate(list_file):
            pass
        max_count += 1
        list_file.seek(0)

        # Read the map list from file
        line_count = 0
        while True:
            line = list_file.readline()
            if not line:
                break

            line_list = line.split(fs)

            # The filename is actually the base name of the map
            # that must be extended by the file suffix
            filename = line_list[0].strip().split(":")[0]
            if base:
                mapname = "%s_%s" % (base, gscript.get_num_suffix(line_count + 1,
                                                                  max_count))
                mapid = "%s@%s" % (mapname, mapset)
            else:
                mapname = filename
                mapid = mapname + "@" + mapset

            row = {}
            row["filename"] = filename
            row["name"] = mapname
            row["id"] = mapid
            row["start"] = line_list[1].strip()
            row["end"] = line_list[2].strip()

            new_list_file.write("%s%s%s%s%s\n" % (mapname, fs, row["start"],
                                                  fs, row["end"]))

            maplist.append(row)
            line_count += 1

        list_file.close()
        new_list_file.close()

        # Read the init file
        fs = "="
        init = {}
        init_file = open(init_file_name, "r")
        while True:
            line = init_file.readline()
            if not line:
                break

            kv = line.split(fs)
            init[kv[0]] = kv[1].strip()

        init_file.close()

        if "temporal_type" not in init or \
           "semantic_type" not in init or \
           "number_of_maps" not in init:
            gscript.fatal(_("Key words %(t)s, %(s)s or %(n)s not found in init"
                            " file.") % {'t': "temporal_type",
                                         's': "semantic_type",
                                         'n': "number_of_maps"})

        if line_count != int(init["number_of_maps"]):
            gscript.fatal(_("Number of maps mismatch in init and list file."))

        format_ = "GTiff"
        type_ = "strds"

        if "stds_type" in init:
            type_ = init["stds_type"]
        if "format" in init:
            format_ = init["format"]

        if stds_type != type_:
            gscript.fatal(_("The archive file is of the wrong space time"
                            " dataset type"))

        # Check the existence of the files
        if format_ == "GTiff":
            for row in maplist:
                filename = row["filename"] + ".tif"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GeoTIFF raster file "
                                    "<%s> in archive.") % filename)
        elif format_ == "AAIGrid":
            for row in maplist:
                filename = row["filename"] + ".asc"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find AAIGrid raster file "
                                    "<%s> in archive.") % filename)
        elif format_ == "GML":
            for row in maplist:
                filename = row["filename"] + ".xml"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GML vector file "
                                    "<%s> in archive.") % filename)
        elif format_ == "pack":
            for row in maplist:
                if type_ == "stvds":
                    filename = str(row["filename"].split(":")[0]) + ".pack"
                else:
                    filename = row["filename"] + ".pack"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GRASS package file "
                                    "<%s> in archive.") % filename)
        else:
            gscript.fatal(_("Unsupported input format"))

        # Check the space time dataset
        id = output + "@" + mapset
        sp = dataset_factory(type_, id)
        if sp.is_in_db() and gscript.overwrite() is False:
            gscript.fatal(_("Space time %(t)s dataset <%(sp)s> is already in"
                            " the database. Use the overwrite flag.") %
                          {'t': type_, 'sp': sp.get_id()})

        # Import the maps
        if type_ == "strds":
            if format_ == "GTiff" or format_ == "AAIGrid":
                _import_raster_maps_from_gdal(maplist, overr, exp, location,
                                              link, format_, set_current_region)
            if format_ == "pack":
                _import_raster_maps(maplist, set_current_region)
        elif type_ == "stvds":
            if format_ == "GML":
                _import_vector_maps_from_gml(
                    maplist, overr, exp, location, link)
            if format_ == "pack":
                _import_vector_maps(maplist)

        # Create the space time dataset
        if sp.is_in_db() and gscript.overwrite() is True:
            gscript.info(_("Overwrite space time %(sp)s dataset "
                           "<%(id)s> and unregister all maps.") %
                         {'sp': sp.get_new_map_instance(None).get_type(),
                          'id': sp.get_id()})
            sp.delete()
            sp = sp.get_new_instance(id)

        temporal_type = init["temporal_type"]
        semantic_type = init["semantic_type"]
        relative_time_unit = None
        if temporal_type == "relative":
            if "relative_time_unit" not in init:
                gscript.fatal(_("Key word %s not found in init file.") %
                              ("relative_time_unit"))
            relative_time_unit = init["relative_time_unit"]
            sp.set_relative_time_unit(relative_time_unit)

        gscript.verbose(_("Create space time %s dataset.") %
                        sp.get_new_map_instance(None).get_type())

        sp.set_initial_values(temporal_type=temporal_type,
                              semantic_type=semantic_type, title=title,
                              description=descr)
        sp.insert()

        # register the maps
        fs = "|"
        register_maps_in_space_time_dataset(
            type=sp.get_new_map_instance(None).get_type(),
            name=output, file=new_list_file_name, start="file",
            end="file", unit=relative_time_unit, dbif=None, fs=fs,
            update_cmd_list=False)

        os.chdir(old_cwd)
    except:
        raise

    # Make sure the location is switched back correctly
    finally:
        if location:
            # Switch to the old location
            try:
                gscript.run_command("g.mapset", mapset=old_env["MAPSET"],
                                    location=old_env["LOCATION_NAME"],
                                    gisdbase=old_env["GISDBASE"])
            except CalledModuleError:
                gscript.warning(_("Switching to original location failed"))

        gscript.set_raise_on_error(old_state)
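A minimal call of import_stds might look like the sketch below; the archive name, output name, and extraction directory are hypothetical, and the surrounding module is assumed to define the init/list/proj file-name constants the function checks for.

# hypothetical usage: unpack an STRDS archive into the current mapset
import_stds(input="precipitation_daily.tar.gz",
            output="precipitation_daily",
            directory="/tmp/strds_unpack",
            title="Daily precipitation",
            descr="Imported daily precipitation series",
            stds_type="strds",
            set_current_region=True)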
Ejemplo n.º 32
0
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': ['1 x 1 degree', '15 x 15 minute'],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011':
                (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness', 'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)
            },
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }
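    # each dataset tuple above is the native resolution expressed as
    # (degrees, meters, feet); the matching entry is selected further
    # down based on the current location's units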

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extension = nav_string['extension']
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']

    # Determine product resolution from the current location's units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(
            _("The default resampling method for product {product} is {res}").
            format(product=gui_product, res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    min_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['w'], gregion['s']),
                                      proj_out=product_proj4,
                                      separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['e'], gregion['n']),
                                      proj_out=product_proj4,
                                      separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = urllib.quote_plus(gui_prod_str)
    prod_format = urllib.quote_plus(product_format)
    prod_extent = urllib.quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try:
        TNM_API_GET = urllib2.urlopen(TNM_API_URL, timeout=12)
    except urllib2.URLError:
        gscript.fatal(
            _("USGS TNM API query has timed out. Check network configuration. Please try again."
              ))
    except:
        gscript.fatal(
            _("Unable to query the USGS TNM API. Check network configuration and try again."
              ))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)

    except:
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
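    # size_diff_tolerance: maximum difference in bytes between a local file
    # and the size reported by the API before the local copy is treated as
    # incomplete and re-downloaded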
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir,
                                              ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size -
                       TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # raise a fatal error if the API query returns no results for the given input
    elif tile_API_count == 0:
        gscript.fatal(
            _("TNM API error or zero tiles available for the given input parameters."
              ))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    if exist_tile_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    if cleanup_list:
        cleanup_msg = _(
            "\n{0} existing incomplete file(s) detected and removed. Run module again."
        ).format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # format the combined download size from bytes into readable units
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if 6 < len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        elif len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
        else:
            # sizes under 1 MB would otherwise leave total_size_str unset
            total_size_str = "{0} bytes".format(total_size)
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(
                        gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print data_info

    if gui_i_flag:
        gscript.info(
            _("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than writing complete files into memory
            dwnld_req = urllib2.urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
            local_file.close()
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except urllib2.URLError:
            gscript.fatal(
                _("USGS download request has timed out. Network or formatting error."
                  ))
        except StandardError:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        for z in local_zip_path_list:
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.endswith(product_extension):
                            extracted_tile = os.path.join(work_dir, str(f))
                            if os.path.exists(extracted_tile):
                                os.remove(extracted_tile)
                                read_zip.extract(f, work_dir)
                            else:
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    cleanup_list.append(extracted_tile)
            except:
                cleanup_list.append(extracted_tile)
                gscript.fatal(
                    _("Unable to locate or extract IMG file from ZIP archive.")
                )

    # operations for extracted or complete files available locally
    for t in local_tile_path_list:
        # create variables for use in GRASS GIS import process
        LT_file_name = os.path.basename(t)
        LT_layer_name = os.path.splitext(LT_file_name)[0]
        patch_names.append(LT_layer_name)
        in_info = ("Importing and reprojecting {0}...").format(LT_file_name)
        gscript.info(in_info)
        # import to GRASS GIS
        try:
            gscript.run_command('r.import',
                                input=t,
                                output=LT_layer_name,
                                resolution='value',
                                resolution_value=product_resolution,
                                extent="region",
                                resample=product_interpolation)
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' or not gui_k_flag:
                cleanup_list.append(t)
        except CalledModuleError:
            in_error = ("Unable to import '{0}'").format(LT_file_name)
            gscript.fatal(in_error)

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if completed_tiles_count > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region',
                                        res=product_resolution,
                                        flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [
                            name + '.' + i for name in patch_names
                        ]
                        gscript.run_command('r.patch',
                                            input=patch_names_i,
                                            output=gui_output_layer + '.' + i)
                else:
                    gscript.run_command('r.patch',
                                        input=patch_names,
                                        output=gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added"
                            ).format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if 'k' flag
                if not gui_k_flag:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [
                                name + '.' + i for name in patch_names
                            ]
                            gscript.run_command('g.remove',
                                                type='raster',
                                                name=patch_names_i,
                                                flags='f')
                    else:
                        gscript.run_command('g.remove',
                                            type='raster',
                                            name=patch_names,
                                            flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
        elif completed_tiles_count == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename',
                                        raster=(patch_names[0] + '.' + i,
                                                gui_output_layer + '.' + i))
            else:
                gscript.run_command('g.rename',
                                    raster=(patch_names[0], gui_output_layer))
        temp_down_count = "\n{0} of {1} tile(s) successfully imported and patched.".format(
            completed_tiles_count, tiles_needed_count)
        gscript.info(temp_down_count)
    else:
        gscript.fatal("Error downloading files. Please retry.")

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = (
            "<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors',
                            map=gui_output_layer,
                            color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite',
                            red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2',
                            blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.del_temp_region()
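
The download loop above streams each response to disk in fixed-size chunks, so large tiles never have to fit in memory. A minimal sketch of the same idiom using Python 3's urllib.request (the code above uses the Python 2 urllib2 API); the function and variable names are illustrative, not part of the module:

import urllib.request

CHUNK = 1024 * 1024  # 1 MB per read


def download_file(url, local_file_path, chunk=CHUNK):
    # Stream the HTTP response to disk chunk by chunk instead of
    # reading the whole payload into memory at once.
    with urllib.request.urlopen(url, timeout=12) as response, \
            open(local_file_path, "wb") as local_file:
        while True:
            data = response.read(chunk)
            if not data:
                break
            local_file.write(data)
    return local_file_path
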
Ejemplo n.º 33
0
def main(options, flags):
    # options and flags into variables
    ipl = options['input']
    raster_exists(ipl)
    opl = options['output']
    # the size option provides backwards compatibility with the window option
    if not options['size'] and not options['window']:
        gs.fatal(_("Required parameter <%s> not set") % 'size')
    if options['size']:
        wz = int(options['size'])
    if options['window']:
        gs.warning(_("The window option is deprecated, use the option"
                     " size instead"))
        wz = int(options['window'])
    if options['size'] and options['size'] != '3' and options['window']:
        gs.warning(_("When the obsolete window option is used, the"
                     " new size option is ignored"))
    if wz % 2 == 0:
        gs.fatal(_("Please provide an odd number for the moving"
                   " window size, not %d") % wz)
    # user wants pf or pff
    user_pf = options['pf']
    user_pff = options['pff']
    # backwards compatibility
    if flags['t']:
        gs.warning(_("The -t flag is deprecated, use pf and pff options"
                     " instead"))
    if not user_pf and not user_pff and flags['t']:
        user_pf = opl + '_pf'
        user_pff = opl + '_pff'
    elif flags['t']:
        gs.warning(_("When pf or pff option is used, the -t flag"
                     " is ignored"))
    flag_r = flags['r']
    flag_s = flags['s']
    clip_output = flags['a']

    # set to current input map region if requested by the user
    # default is (and should be) the current region
    # we could use tmp region for this but if the flag is there
    # it makes sense to use it from now on (but we should reconsider)
    if flag_r:
        gs.message(_("Setting region to input map..."))
        gs.run_command('g.region', raster=ipl, quiet=True)

    # check if map values are limited to 1 and 0
    input_info = gs.raster_info(ipl)
    # we know what we are doing only when input is integer
    if input_info['datatype'] != 'CELL':
        gs.fatal(_("The input raster map must have type CELL"
                   " (integer)"))
    # for integer, we just need to test min and max
    if input_info['min'] != 0 or input_info['max'] != 1:
        gs.fatal(_("The input raster map must be a binary raster,"
                   " i.e. it should contain only values 0 and 1"
                   " (now the minimum is %d and maximum is %d)")
                 % (input_info['min'], input_info['max']))

    # computing pf values
    # let the number of forested pixels be x and the number of all pixels
    # in the moving window be y; then pf = x/y

    gs.info(_("Step 1: Computing Pf values..."))

    # generate grid with pixel-value=number of forest-pixels in window
    # generate grid with pixel-value=number of pixels in moving window:
    tmpA2 = tmpname('tmpA01_')
    tmpC3 = tmpname('tmpA02_')
    gs.run_command("r.neighbors", quiet=True, input=ipl,
                   output=[tmpA2, tmpC3], method=["sum", "count"], size=wz)

    # create pf map
    if user_pf:
        pf = user_pf
    else:
        pf = tmpname('tmpA03_')
    mapcalc("$pf = if( $ipl >=0, float($tmpA2) / float($tmpC3))",
            ipl=ipl, pf=pf, tmpA2=tmpA2, tmpC3=tmpC3)

    # computing pff values
    # Considering pairs of pixels in cardinal directions in
    # a 3x3 window, the total number of adjacent pixel pairs is 12.
    # If x of those pairs include at least one forested pixel and
    # y of them are forest-forest pairs, then pff = y/x.

    gs.info(_("Step 2: Computing Pff values..."))

    # create copy of forest map and convert NULL to 0 (if any)
    tmpC4 = tmpname('tmpA04_')
    gs.run_command("g.copy", raster=[ipl, tmpC4], quiet=True)
    gs.run_command("r.null", map=tmpC4, null=0, quiet=True)

    # window dimensions
    max_index = int((wz - 1) / 2)
    # number of 'forest-forest' pairs
    expr1 = pairs_expression(map_name=tmpC4, max_index=max_index,
                             combine_op='&')
    # number of 'nonforest-forest' pairs
    expr2 = pairs_expression(map_name=tmpC4, max_index=max_index,
                             combine_op='|')
    # create pff map
    if user_pff:
        pff = user_pff
    else:
        pff = tmpname('tmpA07_')
    # potentially this can be split and parallelized
    mapcalc("$pff = if($ipl >= 0, float($tmpl4) / float($tmpl5))",
            ipl=ipl, tmpl4=expr1, tmpl5=expr2, pff=pff)

    # computing fragmentation index
    # (a b) name, condition
    # where a is a number used by Riitters et al. in ERRATUM (2)
    # and b is a number used in the sh script by Sambale and Sylla
    # b also defines 0 for non-forested which is consistent with input
    # (1 3) edge, if Pf > 0.6 and Pf - Pff < 0
    # (2 6) undetermined, if Pf > 0.6 and Pf = Pff
    # (3 4) perforated, if Pf > 0.6 and Pf - Pff > 0
    # (4 5) interior, if Pf = 1.0
    # (5 1) patch, if Pf < 0.4
    # (6 2) transitional, if 0.4 < Pf < 0.6

    gs.info(_("Step 3: Computing fragmentation index..."))

    if clip_output:
        indexfin2 = tmpname('tmpA16_')
    else:
        indexfin2 = opl
    mapcalc(
        "eval("
        "dpf = $pf - $pff,"
        # individual classes
        "patch = if($pf < 0.4, 1, 0),"
        "transitional = if($pf >= 0.4 && $pf < 0.6, 2, 0),"
        "edge = if($pf >= 0.6 && dpf<0,3,0),"
        "perforated = if($pf > 0.6 && $pf < 1 && dpf > 0, 4, 0),"
        "interior = if($pf == 1, 5, 0),"
        "undetermined = if($pf > 0.6 && $pf < 1 && dpf == 0, 6, 0),"
        # null is considered as non-forest and we need to do it before +
        "patch = if(isnull(patch), 0, patch),"
        "transitional = if(isnull(transitional), 0, transitional),"
        "edge = if(isnull(edge), 0, edge),"
        "perforated = if(isnull(perforated), 0, perforated),"
        "interior = if(isnull(interior), 0, interior),"
        "undetermined = if(isnull(undetermined), 0, undetermined),"
        # combine classes (they don't overlap)
        # more readable than nested ifs from the ifs above
        "all = patch + transitional + edge + perforated + interior"
        " + undetermined"
        ")\n"
        # mask result by non-forest (according to the input)
        # removes the nonsense data created in the non-forested areas
        "$out = all * $binary_forest",
        out=indexfin2, binary_forest=ipl, pf=pf, pff=pff)

    # shrink the region
    if clip_output:
        gs.use_temp_region()
        reginfo = gs.parse_command("g.region", flags="gp")
        nscor = max_index * float(reginfo['nsres'])
        ewcor = max_index * float(reginfo['ewres'])
        gs.run_command("g.region",
                       n=float(reginfo['n']) - nscor,
                       s=float(reginfo['s']) + nscor,
                       e=float(reginfo['e']) - ewcor,
                       w=float(reginfo['w']) + ewcor,
                       quiet=True)
        mapcalc("$opl = $if3", opl=opl, if3=indexfin2, quiet=True)

    # create categories
    # TODO: parametrize classes (also in r.mapcalc, r.colors and desc)?
    # TODO: translatable labels?
    labels = LABELS
    gs.write_command("r.category", quiet=True, map=opl,
                     rules='-', stdin=labels, separator='space')

    # create color table
    colors = COLORS_SAMBALE
    gs.write_command("r.colors", map=opl, rules='-',
                     stdin=colors, quiet=True)

    # write metadata for main layer
    gs.run_command("r.support", map=opl,
                   title="Forest fragmentation",
                   source1="Based on %s" % ipl,
                   description="Forest fragmentation index (6 classes)")
    gs.raster_history(opl)

    # write metadata for intermediate layers
    if user_pf:
        # pf layer
        gs.run_command("r.support", map=pf,
                       title="Proportion forested",
                       units="Proportion",
                       source1="Based on %s" % ipl,
                       description="Proportion of pixels in the moving"
                                   " window that is forested")
        gs.raster_history(pf)

    if user_pff:
        # pff layer
        unused, tmphist = tempfile.mkstemp()
        text_file = open(tmphist, "w")
        long_description = """\
Proportion of all adjacent (cardinal directions only) pixel pairs that
include at least one forest pixel for which both pixels are forested.
It thus (roughly) estimates the conditional probability that, given a
pixel of forest, its neighbor is also forest.
"""
        text_file.write(long_description)
        text_file.close()
        gs.run_command("r.support", map=pff,
                       title="Conditional probability neighboring cell"
                             " is forest",
                       units="Proportion",
                       source1="Based on %s" % ipl,
                       description="Probability neighbor of forest cell"
                                   " is forest",
                       loadhistory=tmphist)
        gs.raster_history(pff)
        os.remove(tmphist)

    # report fragmentation index and names of layers created

    if flag_s:
        gs.run_command("r.report", map=opl, units=["h", "p"],
                       flags="n", page_width=50, quiet=True)

    gs.info(_("The following layers were created"))
    gs.info(_("The fragmentation index: %s") % opl)
    if user_pf:
        gs.info(_("The proportion forested (Pf): %s") % pf)
    if user_pff:
        gs.info(_("The proportion forested pixel pairs (Pff): %s") % pff)
Ejemplo n.º 34
0
def main():
    options, flags = grass.parser()

    elevation_input = options['elevation']
    aspect_input = options['aspect']
    slope_input = options['slope']
    linke = options['linke']
    linke_value = options['linke_value']
    albedo = options['albedo']
    albedo_value = options['albedo_value']
    coeff_bh = options['coeff_bh']
    coeff_bh_strds = options['coeff_bh_strds']
    coeff_dh = options['coeff_dh']
    coeff_dh_strds = options['coeff_dh_strds']
    lat = options['lat']
    long_ = options['long']

    beam_rad_basename = beam_rad_basename_user = options['beam_rad_basename']
    diff_rad_basename = diff_rad_basename_user = options['diff_rad_basename']
    refl_rad_basename = refl_rad_basename_user = options['refl_rad_basename']
    glob_rad_basename = glob_rad_basename_user = options['glob_rad_basename']
    incidout_basename = options['incidout_basename']

    beam_rad = options['beam_rad']
    diff_rad = options['diff_rad']
    refl_rad = options['refl_rad']
    glob_rad = options['glob_rad']

    has_output = any([
        beam_rad_basename, diff_rad_basename, refl_rad_basename,
        glob_rad_basename, incidout_basename, beam_rad, diff_rad, refl_rad,
        glob_rad
    ])

    if not has_output:
        grass.fatal(_("No output specified."))

    start_time = float(options['start_time'])
    end_time = float(options['end_time'])
    time_step = float(options['time_step'])
    nprocs = int(options['nprocs'])
    day = int(options['day'])
    civil_time = float(
        options['civil_time']) if options['civil_time'] else None
    distance_step = float(
        options['distance_step']) if options['distance_step'] else None
    solar_constant = float(
        options['solar_constant']) if options['solar_constant'] else None
    if solar_constant:
        # check it's newer version of r.sun
        if not module_has_parameter('r.sun', 'solar_constant'):
            grass.warning(
                _("This version of r.sun lacks solar_constant option, "
                  "it will be ignored. Use newer version of r.sun."))
            solar_constant = None
    temporal = flags['t']
    binary = flags['b']
    mode1 = options['mode'] == 'mode1'
    mode2 = not mode1
    tmpName = 'tmp'
    year = int(options['year'])
    rsun_flags = ''
    if flags['m']:
        rsun_flags += 'm'
    if flags['p']:
        rsun_flags += 'p'

    if not is_grass_7() and temporal:
        grass.warning(_("Flag t has effect only in GRASS 7"))

    # check: start < end
    if start_time > end_time:
        grass.fatal(_("Start time is after end time."))
    if time_step >= end_time - start_time:
        grass.fatal(_("Time step is too big."))

    if mode2 and incidout_basename:
        grass.fatal(_("Can't compute incidence angle in mode 2"))
    if flags['c'] and mode1:
        grass.fatal(
            _("Can't compute cumulative irradiation rasters in mode 1"))
    if mode2 and flags['b']:
        grass.fatal(_("Can't compute binary rasters in mode 2"))
    if any((beam_rad, diff_rad, refl_rad, glob_rad)) and mode1:
        grass.fatal(_("Can't compute irradiation raster maps in mode 1"))

    if beam_rad and not beam_rad_basename:
        beam_rad_basename = create_tmp_map_name('beam_rad')
        MREMOVE.append(beam_rad_basename)
    if diff_rad and not diff_rad_basename:
        diff_rad_basename = create_tmp_map_name('diff_rad')
        MREMOVE.append(diff_rad_basename)
    if refl_rad and not refl_rad_basename:
        refl_rad_basename = create_tmp_map_name('refl_rad')
        MREMOVE.append(refl_rad_basename)
    if glob_rad and not glob_rad_basename:
        glob_rad_basename = create_tmp_map_name('glob_rad')
        MREMOVE.append(glob_rad_basename)

    # here we check all the days
    if not grass.overwrite():
        check_time_map_names(beam_rad_basename,
                             grass.gisenv()['MAPSET'], start_time, end_time,
                             time_step, binary, tmpName, mode1)
        check_time_map_names(diff_rad_basename,
                             grass.gisenv()['MAPSET'], start_time, end_time,
                             time_step, binary, tmpName, mode1)
        check_time_map_names(refl_rad_basename,
                             grass.gisenv()['MAPSET'], start_time, end_time,
                             time_step, binary, tmpName, mode1)
        check_time_map_names(glob_rad_basename,
                             grass.gisenv()['MAPSET'], start_time, end_time,
                             time_step, binary, tmpName, mode1)

    # check for slope/aspect
    if not aspect_input or not slope_input:
        params = {}
        if not aspect_input:
            aspect_input = create_tmp_map_name('aspect')
            params.update({'aspect': aspect_input})
            REMOVE.append(aspect_input)
        if not slope_input:
            slope_input = create_tmp_map_name('slope')
            params.update({'slope': slope_input})
            REMOVE.append(slope_input)

        grass.info(_("Running r.slope.aspect..."))
        grass.run_command('r.slope.aspect',
                          elevation=elevation_input,
                          quiet=True,
                          **params)

    grass.info(_("Running r.sun in a loop..."))
    count = 0
    # Parallel processing
    proc_list = []
    proc_count = 0
    suffixes = []
    suffixes_all = []
    if mode1:
        times = list(frange1(start_time, end_time, time_step))
    else:
        times = list(frange2(start_time, end_time, time_step))
    num_times = len(times)
    core.percent(0, num_times, 1)
    for time in times:
        count += 1
        core.percent(count, num_times, 10)

        coeff_bh_raster = coeff_bh
        if coeff_bh_strds:
            coeff_bh_raster = get_raster_from_strds(year,
                                                    day,
                                                    time,
                                                    strds=coeff_bh_strds)
        coeff_dh_raster = coeff_dh
        if coeff_dh_strds:
            coeff_dh_raster = get_raster_from_strds(year,
                                                    day,
                                                    time,
                                                    strds=coeff_dh_strds)

        suffix = '_' + format_time(time)
        proc_list.append(
            Process(target=run_r_sun,
                    args=(elevation_input, aspect_input, slope_input, day,
                          time, civil_time, linke, linke_value, albedo,
                          albedo_value, coeff_bh_raster, coeff_dh_raster, lat,
                          long_, beam_rad_basename, diff_rad_basename,
                          refl_rad_basename, glob_rad_basename,
                          incidout_basename, suffix, binary, tmpName,
                          None if mode1 else time_step, distance_step,
                          solar_constant, rsun_flags)))

        proc_list[proc_count].start()
        proc_count += 1
        suffixes.append(suffix)
        suffixes_all.append(suffix)

        if proc_count == nprocs or proc_count == num_times or count == num_times:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode

            if exitcodes != 0:
                core.fatal(_("Error while r.sun computation"))

            # Empty process list
            proc_list = []
            suffixes = []

    if beam_rad:
        sum_maps(beam_rad, beam_rad_basename, suffixes_all)
        set_color_table([beam_rad])
    if diff_rad:
        sum_maps(diff_rad, diff_rad_basename, suffixes_all)
        set_color_table([diff_rad])
    if refl_rad:
        sum_maps(refl_rad, refl_rad_basename, suffixes_all)
        set_color_table([refl_rad])
    if glob_rad:
        sum_maps(glob_rad, glob_rad_basename, suffixes_all)
        set_color_table([glob_rad])

    if not any([
            beam_rad_basename_user, diff_rad_basename_user,
            refl_rad_basename_user, glob_rad_basename_user
    ]):
        return 0

    # cumulative sum
    if flags['c']:
        copy_tmp = create_tmp_map_name('copy')
        REMOVE.append(copy_tmp)
        for each in (beam_rad_basename_user, diff_rad_basename_user,
                     refl_rad_basename_user, glob_rad_basename_user):
            if each:
                previous = each + suffixes_all[0]
                for suffix in suffixes_all[1:]:
                    new = each + suffix
                    grass.run_command('g.copy',
                                      raster=[new, copy_tmp],
                                      quiet=True)
                    grass.mapcalc("{new} = {previous} + {current}".format(
                        new=new, previous=previous, current=copy_tmp),
                                  overwrite=True)
                    previous = new

    # add timestamps either via temporal framework in 7 or r.timestamp in 6.x
    if is_grass_7() and temporal:
        core.info(_("Registering created maps into temporal dataset..."))
        import grass.temporal as tgis

        def registerToTemporal(basename, suffixes, mapset, start_time,
                               time_step, title, desc):
            maps = ','.join(
                [basename + suf + '@' + mapset for suf in suffixes])
            tgis.open_new_stds(basename,
                               type='strds',
                               temporaltype='absolute',
                               title=title,
                               descr=desc,
                               semantic='mean',
                               dbif=None,
                               overwrite=grass.overwrite())
            tgis.register_maps_in_space_time_dataset(type='raster',
                                                     name=basename,
                                                     maps=maps,
                                                     start=start_time,
                                                     end=None,
                                                     increment=time_step,
                                                     dbif=None,
                                                     interval=False)

        # Make sure the temporal database exists
        tgis.init()

        mapset = grass.gisenv()['MAPSET']
        if mode2:
            start_time += 0.5 * time_step
        absolute_time = datetime.datetime(year, 1, 1) + \
                        datetime.timedelta(days=day - 1) + \
                        datetime.timedelta(hours=start_time)
        start = absolute_time.strftime("%Y-%m-%d %H:%M:%S")
        step = datetime.timedelta(hours=time_step)
        step = "%d seconds" % step.seconds

        if beam_rad_basename_user:
            registerToTemporal(
                beam_rad_basename_user,
                suffixes_all,
                mapset,
                start,
                step,
                title="Beam irradiance" if mode1 else "Beam irradiation",
                desc="Output beam irradiance raster maps [W.m-2]"
                if mode1 else "Output beam irradiation raster maps [Wh.m-2]")
        if diff_rad_basename_user:
            registerToTemporal(
                diff_rad_basename_user,
                suffixes_all,
                mapset,
                start,
                step,
                title="Diffuse irradiance" if mode1 else "Diffuse irradiation",
                desc="Output diffuse irradiance raster maps [W.m-2]" if mode1
                else "Output diffuse irradiation raster maps [Wh.m-2]")
        if refl_rad_basename_user:
            registerToTemporal(
                refl_rad_basename_user,
                suffixes_all,
                mapset,
                start,
                step,
                title="Reflected irradiance"
                if mode1 else "Reflected irradiation",
                desc="Output reflected irradiance raster maps [W.m-2]" if mode1
                else "Output reflected irradiation raster maps [Wh.m-2]")
        if glob_rad_basename_user:
            registerToTemporal(
                glob_rad_basename_user,
                suffixes_all,
                mapset,
                start,
                step,
                title="Total irradiance" if mode1 else "Total irradiation",
                desc="Output total irradiance raster maps [W.m-2]"
                if mode1 else "Output total irradiation raster maps [Wh.m-2]")
        if incidout_basename:
            registerToTemporal(incidout_basename,
                               suffixes_all,
                               mapset,
                               start,
                               step,
                               title="Incidence angle",
                               desc="Output incidence angle raster maps")

    else:
        absolute_time = datetime.datetime(year, 1, 1) + \
                        datetime.timedelta(days=day - 1)
        for i, time in enumerate(times):
            grass_time = format_grass_time(absolute_time +
                                           datetime.timedelta(hours=time))
            if beam_rad_basename_user:
                set_time_stamp(beam_rad_basename_user + suffixes_all[i],
                               time=grass_time)
            if diff_rad_basename_user:
                set_time_stamp(diff_rad_basename_user + suffixes_all[i],
                               time=grass_time)
            if refl_rad_basename_user:
                set_time_stamp(refl_rad_basename_user + suffixes_all[i],
                               time=grass_time)
            if glob_rad_basename_user:
                set_time_stamp(glob_rad_basename_user + suffixes_all[i],
                               time=grass_time)
            if incidout_basename:
                set_time_stamp(incidout_basename + suffixes_all[i],
                               time=grass_time)

    if beam_rad_basename_user:
        maps = [beam_rad_basename_user + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if diff_rad_basename_user:
        maps = [diff_rad_basename_user + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if refl_rad_basename_user:
        maps = [refl_rad_basename_user + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if glob_rad_basename_user:
        maps = [glob_rad_basename_user + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if incidout_basename:
        maps = [incidout_basename + suf for suf in suffixes_all]
        set_color_table(maps)
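
frange1() and frange2() above generate the time values for mode 1 (instantaneous irradiance) and mode 2 (irradiation over intervals) but are not shown. A plausible sketch, assuming mode 1 includes both endpoints while mode 2 yields one value per full interval; the module's actual generators may differ:

def frange1(start, end, step):
    # mode 1: instantaneous times start, start+step, ..., end inclusive
    x = start
    while x <= end + 1e-9:  # small epsilon guards float rounding
        yield x
        x += step


def frange2(start, end, step):
    # mode 2: start of each full interval that fits in [start, end]
    x = start
    while x + step <= end + 1e-9:
        yield x
        x += step
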
Ejemplo n.º 35
0
def main():
    options, flags = gscript.parser()

    input_raster = options['input']
    points = options['output']
    if options['sampled']:
        sampled_rasters = options['sampled'].split(',')
    else:
        sampled_rasters = []
    npoints = [int(num) for num in options['npoints'].split(',')]
    flag_s = flags['s']

    if gscript.find_file(name='MASK', element='cell', mapset=gscript.gisenv()['MAPSET'])['name']:
        gscript.fatal(_("MASK is active. Please remove it before proceeding."))

    # we clean up mask too, so register after we know that mask is not present
    atexit.register(cleanup)

    temp_name = 'tmp_r_sample_category_{}_'.format(os.getpid())
    points_nocats = temp_name + 'points_nocats'
    TMP.append(points_nocats)

    # input must be CELL
    rdescribe = gscript.read_command('r.stats', flags='ln', input=input_raster, separator='pipe')
    catlab = rdescribe.splitlines()
    categories = [int(z.split('|')[0]) for z in catlab]  # a list (not a lazy map) so len() works below
    catlab = dict([z.split('|') for z in catlab])
    if len(npoints) == 1:
        npoints = npoints * len(categories)
    else:
        if len(categories) != len(npoints):
            gscript.fatal(_("Number of categories in raster does not match the number of provided sampling points numbers."))

    # Create sample points per category
    vectors = []
    for i, cat in enumerate(categories):
        # skip generating points if none are required
        if npoints[i] == 0:
            continue
        gscript.info(_("Selecting {n} sampling locations at category {cat}...").format(n=npoints[i], cat=cat))
        # change mask to sample zeroes and then change again to sample ones
        # overwrite mask for an easy loop
        gscript.run_command('r.mask', raster=input_raster, maskcats=cat, overwrite=True, quiet=True)

        # Check number of cells in category
        nrc = int(gscript.parse_command('r.univar', map=input_raster, flags='g')['n'])
        if nrc < npoints[i]:
            if flag_s:
                gscript.info(_("Not enough points in category {cat}. Skipping").format(cat=categories[i]))
                continue
            gscript.warning(_("Number of raster cells in category {cat} < {np}. Sampling {n} points").format(cat=categories[i], np=npoints[i], n=nrc))
            npoints[i] = nrc

        # Create the points
        vector = temp_name + str(cat)
        vectors.append(vector)
        gscript.run_command('r.random', input=input_raster, npoints=npoints[i], vector=vector, quiet=True)
        TMP.append(vector)
        gscript.run_command('r.mask', flags='r', quiet=True)

    gscript.run_command('v.patch', input=vectors, output=points, quiet=True)
    # remove and re-add cats so that they are unique
    gscript.run_command('v.category', input=points, option='del', cat=-1, output=points_nocats, quiet=True)
    # overwrite to reuse the map
    gscript.run_command('v.category', input=points_nocats, option='add', output=points, overwrite=True, quiet=True)

    # Sample layers
    columns = []
    column_names = []
    sampled_rasters.insert(0, input_raster)
    for raster in sampled_rasters:
        column = escape_sql_column(strip_mapset(raster).lower())
        column_names.append(column)
        datatype = gscript.parse_command('r.info', flags='g', map=raster)['datatype']
        if datatype == 'CELL':
            datatype = 'integer'
        else:
            datatype = 'double precision'
        columns.append("{column} {datatype}".format(column=column, datatype=datatype))
    gscript.run_command('v.db.addtable', map=points, columns=','.join(columns), quiet=True)
    for raster, column in zip(sampled_rasters, column_names):
        gscript.info(_("Sampling raster map %s...") % raster)
        gscript.run_command('v.what.rast', map=points, type='point', raster=raster, column=column, quiet=True)

    # Add category labels
    if len(set(catlab.values())) == 1 and not list(set(catlab.values()))[0]:
        gscript.verbose(_("There are no category labels in the raster to add"))
    else:
        gscript.run_command("v.db.addcolumn", map=points, columns="label varchar(250)")
        table_name = escape_sql_column(strip_mapset(points).lower())
        for i in categories:
            sqlstat = "UPDATE " + table_name + " SET label='" + catlab[str(i)] + "' WHERE " + column_names[0] + " == " + str(i)
            gscript.run_command("db.execute", sql=sqlstat)
Ejemplo n.º 36
0
def main(options, flags):
    """Determine which tiles the user needs to download to cover the region
    of interest as determined by region, vector layer and/or raster layer"""

    variables = options['variables'].split(',')
    region = options['region']
    vector = options['vector']
    raster = options['raster']
    path = options['dir']
    if path:
        path = r"{}/".format(path)
    flag_g = flags['g']

    n, s, w, e = ([] for i in range(4))

    if region:
        reg = grass.parse_command("r.info", flags="g", map=region)
        n.append(float(reg['north']))
        s.append(float(reg['south']))
        w.append(float(reg['west']))
        e.append(float(reg['east']))

    if vector:
        vec = grass.parse_command("v.info", flags="g", map=vector)
        n.append(float(vec['north']))
        s.append(float(vec['south']))
        w.append(float(vec['west']))
        e.append(float(vec['east']))

    if raster:
        ras = grass.parse_command("r.info", flags="g", map=raster)
        n.append(float(ras['north']))
        s.append(float(ras['south']))
        w.append(float(ras['west']))
        e.append(float(ras['east']))

    nbound = max(n)
    sbound = min(s)
    wbound = min(w)
    ebound = max(e)

    ru = int(3 - math.ceil(nbound / 30))
    rd = int(2 - math.floor(sbound / 30))
    cl = int(math.floor(wbound / 30) + 6)
    cr = int(math.ceil(ebound / 30 + 5))
    rows = [ru] if ru == rd else range(ru, rd + 1)
    cols = [cl] if cr == cl else range(cl, cr + 1)
    tiles = [str(x)+str(y) for x in rows for y in cols]

    if len(tiles) > 1:
        M = ', '.join(tiles[:-1]) + ' & ' + tiles[-1]
    elif len(tiles) == 1:
        M = tiles[0]

    if not options['variables']:
        if flag_g:
            print(','.join(tiles))
        else:
            grass.info("To cover your region or interest\n "
                       "download the tiles {}".format(M))
    else:
        download_files(variables, tiles, path)
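
The row/column arithmetic above maps latitude/longitude bounds onto WorldClim's 30-degree tile grid. A worked example with assumed bounds (a region spanning 40-50 N and 70-80 W):

import math

nbound, sbound, wbound, ebound = 50.0, 40.0, -80.0, -70.0
ru = int(3 - math.ceil(nbound / 30))   # 3 - 2 = 1
rd = int(2 - math.floor(sbound / 30))  # 2 - 1 = 1
cl = int(math.floor(wbound / 30) + 6)  # -3 + 6 = 3
cr = int(math.ceil(ebound / 30 + 5))   # ceil(2.67) = 3
# one row and one column, so a single tile: "13"
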
Ejemplo n.º 37
0
def main():
    options, flags = grass.parser()

    elevation_input = options['elevation']
    aspect_input = options['aspect']
    slope_input = options['slope']
    linke = options['linke']
    linke_value = options['linke_value']
    albedo = options['albedo']
    albedo_value = options['albedo_value']
    horizon_basename = options['horizon_basename']
    horizon_step = options['horizon_step']

    beam_rad_basename = options['beam_rad_basename']
    diff_rad_basename = options['diff_rad_basename']
    refl_rad_basename = options['refl_rad_basename']
    glob_rad_basename = options['glob_rad_basename']
    incidout_basename = options['incidout_basename']

    if not any([beam_rad_basename, diff_rad_basename,
                refl_rad_basename, glob_rad_basename,
                incidout_basename]):
        grass.fatal(_("No output specified."))

    start_time = float(options['start_time'])
    end_time = float(options['end_time'])
    time_step = float(options['time_step'])
    nprocs = int(options['nprocs'])
    day = int(options['day'])
    temporal = flags['t']
    binary = flags['b']
    binaryTmpName = 'binary'
    year = int(options['year'])

    if not is_grass_7() and temporal:
        grass.warning(_("Flag t has effect only in GRASS 7"))

    # check: start < end
    if start_time > end_time:
        grass.fatal(_("Start time is after end time."))
    if time_step >= end_time - start_time:
        grass.fatal(_("Time step is too big."))

    # here we check all the days
    if not grass.overwrite():
        check_time_map_names(beam_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(diff_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(refl_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)
        check_time_map_names(glob_rad_basename, grass.gisenv()['MAPSET'],
                             start_time, end_time, time_step, binary,
                             binaryTmpName)

    # check for slope/aspect
    if not aspect_input or not slope_input:
        params = {}
        if not aspect_input:
            aspect_input = create_tmp_map_name('aspect')
            params.update({'aspect': aspect_input})
            TMP.append(aspect_input)
        if not slope_input:
            slope_input = create_tmp_map_name('slope')
            params.update({'slope': slope_input})
            TMP.append(slope_input)

        grass.info(_("Running r.slope.aspect..."))
        grass.run_command('r.slope.aspect', elevation=elevation_input,
                          quiet=True, **params)

    grass.info(_("Running r.sun in a loop..."))
    count = 0
    # Parallel processing
    proc_list = []
    proc_count = 0
    suffixes = []
    suffixes_all = []
    times = list(frange(start_time, end_time, time_step))
    num_times = len(times)
    core.percent(0, num_times, 1)
    for time in times:
        count += 1
        core.percent(count, num_times, 10)

        suffix = '_' + format_time(time)
        proc_list.append(Process(target=run_r_sun,
                                 args=(elevation_input, aspect_input,
                                       slope_input, day, time,
                                       linke, linke_value,
                                       albedo, albedo_value,
                                       horizon_basename, horizon_step,
                                       beam_rad_basename,
                                       diff_rad_basename,
                                       refl_rad_basename,
                                       glob_rad_basename,
                                       incidout_basename,
                                       suffix,
                                       binary, binaryTmpName)))

        proc_list[proc_count].start()
        proc_count += 1
        suffixes.append(suffix)
        suffixes_all.append(suffix)

        if proc_count == nprocs or proc_count == num_times or count == num_times:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode

            if exitcodes != 0:
                core.fatal(_("Error while r.sun computation"))

            # Empty process list
            proc_list = []
            suffixes = []
    # FIXME: how does percent really work?
    # core.percent(1, 1, 1)

    # add timestamps either via temporal framework in 7 or r.timestamp in 6.x
    if is_grass_7() and temporal:
        core.info(_("Registering created maps into temporal dataset..."))
        import grass.temporal as tgis

        def registerToTemporal(basename, suffixes, mapset, start_time,
                               time_step, title, desc):
            maps = ','.join([basename + suf + '@' + mapset for suf in suffixes])
            tgis.open_new_stds(basename, type='strds',
                               temporaltype='absolute',
                               title=title, descr=desc,
                               semantic='mean', dbif=None,
                               overwrite=grass.overwrite())
            tgis.register_maps_in_space_time_dataset(
                type='raster', name=basename, maps=maps, start=start_time,
                end=None, increment=time_step, dbif=None, interval=False)
        # Make sure the temporal database exists
        tgis.init()

        mapset = grass.gisenv()['MAPSET']
        absolute_time = datetime.datetime(year, 1, 1) + \
                        datetime.timedelta(days=day - 1) + \
                        datetime.timedelta(hours=start_time)
        start = absolute_time.strftime("%Y-%m-%d %H:%M:%S")
        step = datetime.timedelta(hours=time_step)
        step = "%d seconds" % step.seconds

        if beam_rad_basename:
            registerToTemporal(beam_rad_basename, suffixes_all, mapset, start,
                               step, title="Beam irradiance",
                               desc="Output beam irradiance raster maps [Wh.m-2]")
        if diff_rad_basename:
            registerToTemporal(diff_rad_basename, suffixes_all, mapset, start,
                               step, title="Diffuse irradiance",
                               desc="Output diffuse irradiance raster maps [Wh.m-2]")
        if refl_rad_basename:
            registerToTemporal(refl_rad_basename, suffixes_all, mapset, start,
                               step, title="Reflected irradiance",
                               desc="Output reflected irradiance raster maps [Wh.m-2]")
        if glob_rad_basename:
            registerToTemporal(glob_rad_basename, suffixes_all, mapset, start,
                               step, title="Total irradiance",
                               desc="Output total irradiance raster maps [Wh.m-2]")
        if incidout_basename:
            registerToTemporal(incidout_basename, suffixes_all, mapset, start,
                               step, title="Incidence angle",
                               desc="Output incidence angle raster maps")

    else:
        absolute_time = datetime.datetime(year, 1, 1) + \
                        datetime.timedelta(days=day - 1)
        for i, time in enumerate(times):
            grass_time = format_grass_time(absolute_time + datetime.timedelta(hours=time))
            if beam_rad_basename:
                set_time_stamp(beam_rad_basename + suffixes_all[i],
                               time=grass_time)
            if diff_rad_basename:
                set_time_stamp(diff_rad_basename + suffixes_all[i],
                               time=grass_time)
            if refl_rad_basename:
                set_time_stamp(refl_rad_basename + suffixes_all[i],
                               time=grass_time)
            if glob_rad_basename:
                set_time_stamp(glob_rad_basename + suffixes_all[i],
                               time=grass_time)
            if incidout_basename:
                set_time_stamp(incidout_basename + suffixes_all[i],
                               time=grass_time)

    if beam_rad_basename:
        maps = [beam_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if diff_rad_basename:
        maps = [diff_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if refl_rad_basename:
        maps = [refl_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if glob_rad_basename:
        maps = [glob_rad_basename + suf for suf in suffixes_all]
        set_color_table(maps, binary)
    if incidout_basename:
        maps = [incidout_basename + suf for suf in suffixes_all]
        set_color_table(maps)
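
The loop above launches one Process per time step and joins them in batches of nprocs, so no more than nprocs instances of r.sun run at once. The same idiom reduced to a standalone sketch (function and variable names are illustrative):

from multiprocessing import Process


def run_in_batches(tasks, worker, nprocs):
    # start up to nprocs workers, then join the whole batch before
    # launching more, mirroring the r.sun loop above
    batch = []
    for args in tasks:
        proc = Process(target=worker, args=args)
        proc.start()
        batch.append(proc)
        if len(batch) == nprocs:
            join_all(batch)
            batch = []
    join_all(batch)


def join_all(batch):
    for proc in batch:
        proc.join()
    if any(proc.exitcode != 0 for proc in batch):
        raise RuntimeError("a worker process failed")
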
Ejemplo n.º 38
0
def main(options, flags):

    gisbase = os.getenv('GISBASE')
    if not gisbase:
        gs.fatal(_('$GISBASE not defined'))
        return 0

    # Variables
    ref = options['reference']
    REF = ref.split(',')
    pro = options['projection']
    if pro:
        PRO = pro.split(',')
    else:
        PRO = REF
    opn = [z.split('@')[0] for z in PRO]
    out = options['output']
    region = options['region']
    flag_d = flags['d']
    flag_e = flags['e']
    flag_p = flags['p']

    # get current region settings, to compare to new ones later
    regbu1 = tmpname("region")
    gs.parse_command("g.region", save=regbu1)

    # Check if region, projected layers or mask is given
    if region:
        ffile = gs.find_file(region, element="region")
        if not ffile:
            gs.fatal(_("the region {} does not exist").format(region))
    if not pro and not checkmask() and not region:
        gs.fatal(_("You need to provide projected layers, a region, or "
                   "a mask has to be set"))
    if pro and len(REF) != len(PRO):
        gs.fatal(_("The number of reference and projection layers need to "
                   "be the same. Your provided %d reference and %d"
                   "projection variables") % (len(REF), len(PRO)))

    # Text for history in metadata
    opt2 = dict((k, v) for k, v in options.iteritems() if v)
    hist = ' '.join("{!s}={!r}".format(k, v) for (k, v) in opt2.iteritems())
    hist = "r.exdet {}".format(hist)
    unused, tmphist = tempfile.mkstemp()
    with open(tmphist, "w") as text_file:
        text_file.write(hist)

    # Create covariance table
    VI = CoVar(maps=REF)

    # Import reference data & compute univar stats per reference layer
    s = len(REF)
    dat_ref = stat_mean = stat_min = stat_max = None
    layer = garray.array()

    for i, map in enumerate(REF):
        layer.read(map, null=np.nan)
        r, c = layer.shape
        if dat_ref is None:
            dat_ref = np.empty((s, r, c), dtype=np.double)
        if stat_mean is None:
            stat_mean = np.empty((s), dtype=np.double)
        if stat_min is None:
            stat_min = np.empty((s), dtype=np.double)
        if stat_max is None:
            stat_max = np.empty((s), dtype=np.double)
        stat_min[i] = np.nanmin(layer)
        stat_mean[i] = np.nanmean(layer)
        stat_max[i] = np.nanmax(layer)
        dat_ref[i, :, :] = layer
    del layer

    # Compute mahalanobis over full set of reference layers
    mahal_ref = mahal(v=dat_ref, m=stat_mean, VI=VI)
    mahal_ref_max = max(mahal_ref[np.isfinite(mahal_ref)])
    if flag_e:
        mahalref = "{}_mahalref".format(out)
        mahal_ref.write(mapname=mahalref)
        gs.info(_("Mahalanobis distance map saved: {}").format(mahalref))
        gs.run_command("r.support", map=mahalref,
                       title="Mahalanobis distance map", units="unitless",
                       description="Mahalanobis distance map in reference "
                                   "domain", loadhistory=tmphist)
    del mahal_ref

    # Remove mask and set new region based on user-defined region or
    # otherwise based on projection layers
    if checkmask():
        gs.run_command("r.mask", flags="r")
    if region:
        gs.run_command("g.region", region=region)
        gs.info(_("The region has set to the region {}").format(region))
    if pro:
        gs.run_command("g.region", raster=PRO[0])
        # TODO: only set region to PRO[0] when different from current region
        gs.info(_("The region has set to match the proj raster layers"))

    # Import projected layers in numpy array
    s = len(PRO)
    dat_pro = None
    layer = garray.array()
    for i, map in enumerate(PRO):
        layer.read(map, null=np.nan)
        r, c = layer.shape
        if dat_pro is None:
            dat_pro = np.empty((s, r, c), dtype=np.double)
        dat_pro[i, :, :] = layer
    del layer

    # Compute mahalanobis distance
    mahal_pro = mahal(v=dat_pro, m=stat_mean, VI=VI)
    if flag_d:
        mahalpro = "{}_mahalpro".format(out)
        mahal_pro.write(mapname=mahalpro)
        gs.info(_("Mahalanobis distance map saved: {}").format(mahalpro))
        gs.run_command("r.support", map=mahalpro,
                       title="Mahalanobis distance map projection domain",
                       units="unitless", loadhistory=tmphist,
                       description="Mahalanobis distance map in projection "
                       "domain estimated using covariance of refence data")

    # Compute NT1
    tmplay = tmpname(out)
    mnames = [None] * len(REF)
    for i in xrange(len(REF)):
        tmpout = tmpname("exdet")
        # TODO: computations below sometimes result in very small negative
        # numbers, which are not 'real', but rather due to some differences
        # in handling digits in grass and python, hence second mapcalc
        # statement. Need to figure out how to handle this better.
        gs.mapcalc("eval("
                   "tmp = min(($prolay - $refmin), ($refmax - $prolay),0) / "
                   "($refmax - $refmin))\n"
                   "$Dij = if(tmp > -0.000000001, 0, tmp)",
                   Dij=tmpout, prolay=PRO[i], refmin=stat_min[i],
                   refmax=stat_max[i], quiet=True)
        mnames[i] = tmpout
    gs.run_command("r.series", quiet=True, input=mnames, output=tmplay,
                   method="sum")

    # Compute most influential covariate (MIC) metric for NT1
    if flag_p:
        tmpla1 = tmpname(out)
        gs.run_command("r.series", quiet=True, output=tmpla1, input=mnames,
                       method="min_raster")

    # Compute NT2
    tmpla2 = tmpname(out)
    nt2map = garray.array()
    nt2map[...] = mahal_pro / mahal_ref_max
    nt2map.write(mapname=tmpla2)

    # Compute most influential covariate (MIC) metric for NT2
    if flag_p:
        tmpla3 = tmpname(out)
        laylist = []
        layer = garray.array()
        for i, map in enumerate(opn):
            gs.use_temp_region()
            gs.run_command("g.region", quiet=True, region=regbu1)
            REFtmp = [x for num, x in enumerate(REF) if not num == i]
            stattmp = np.delete(stat_mean, i, axis=0)
            VItmp = CoVar(maps=REFtmp)
            gs.del_temp_region()
            dat_protmp = np.delete(dat_pro, i, axis=0)
            ymap = mahal(v=dat_protmp, m=stattmp, VI=VItmp)
            # in Mesgaran et al, the MIC2 is the max icp, but that is the
            # same as the minimum mahalanobis distance (ymap)
            # icp = (mahal_pro - ymap) / mahal_pro * 100
            layer[:, :] = ymap
            tmpmahal = tmpname(out)
            layer.write(tmpmahal)
            laylist.append(tmpmahal)
        gs.run_command("r.series", quiet=True, output=tmpla3,
                       input=laylist, method="min_raster", overwrite=True)

    # Compute nt1, nt2, and nt1and2 novelty maps
    nt1 = "{}_NT1".format(out)
    nt2 = "{}_NT2".format(out)
    nt12 = "{}_NT1NT2".format(out)
    expr = ";".join([
        "$nt12 = if($tmplay < 0, $tmplay, $tmpla2)",
        "$nt2 = if($tmplay >= 0, $tmpla2, null())",
        "$nt1 = if($tmplay < 0, $tmplay, null())"])
    gs.mapcalc(expr, nt12=nt12, nt1=nt1, nt2=nt2, tmplay=tmplay,
               tmpla2=tmpla2, quiet=True)

    # Write metadata nt1, nt2, nt1and2  maps
    gs.run_command("r.support", map=nt1, units="unitless",
                   title="Type 1 similarity",
                   description="Type 1 similarity (NT1)",
                   loadhistory=tmphist)
    gs.run_command("r.support", map=nt2, units="unitless",
                   title="Type 2 similarity",
                   description="Type 2 similarity (NT2)",
                   loadhistory=tmphist)
    gs.run_command("r.support", map=nt12, units="unitless",
                   title="Type 1 + 2 novelty / similarity",
                   description="Type 1 + 2 similarity (NT1)",
                   loadhistory=tmphist)

    # Compute MIC maps
    if flag_p:
        mic12 = "{}_MICNT1and2".format(out)
        expr = "$mic12 = if($tmplay < 0, $tmpla1, " \
               "if($tmpla2>1, $tmpla3, -1))"
        gs.mapcalc(expr, tmplay=tmplay, tmpla1=tmpla1, tmpla2=tmpla2,
                   tmpla3=tmpla3, mic12=mic12, quiet=True)

        # Write category labels to MIC maps
        tmpcat = tempfile.mkstemp()
        with open(tmpcat[1], "w") as text_file:
            text_file.write("-1:None\n")
            for cats in xrange(len(opn)):
                text_file.write("{}:{}\n".format(cats, opn[cats]))
        gs.run_command("r.category", quiet=True, map=mic12, rules=tmpcat[1],
                       separator=":")
        os.remove(tmpcat[1])
        CATV = Module('r.category', map=mic12, stdout_=PIPE).outputs.stdout
        Module('r.category', map=mic12, rules="-", stdin_=CATV, quiet=True)
        gs.run_command("r.support", map=mic12, units="unitless",
                       title="Most influential covariate",
                       description="Most influential covariate (MIC) for NT1"
                                   "and NT2", loadhistory=tmphist)

    # Write color table
    gs.write_command("r.colors", map=nt12, rules='-', stdin=COLORS_EXDET,
                     quiet=True)

    # Finalize
    gs.info(_("Done...."))
Ejemplo n.º 39
0
def import_stds(input,
                output,
                directory,
                title=None,
                descr=None,
                location=None,
                link=False,
                exp=False,
                overr=False,
                create=False,
                stds_type="strds",
                base=None,
                set_current_region=False,
                memory=300):
    """Import space time datasets of type raster and vector

        :param input: Name of the input archive file
        :param output: The name of the output space time dataset
        :param directory: The extraction directory
        :param title: The title of the newly created space time dataset
        :param descr: The description of the newly created
                     space time dataset
        :param location: The name of the location that should be created,
                        maps are imported into this location
        :param link: Switch to link raster maps instead of importing them
        :param exp: Extend location extents based on new dataset
        :param overr: Override projection (use location's projection)
        :param create: Create the location specified by the "location"
                      parameter and exit.
                      Do not import the space time datasets.
        :param stds_type: The type of the space time dataset that
                         should be imported
        :param base: The base name of the new imported maps, it will be
                     extended using a numerical index.
        :param set_current_region: Switch to set the current region based
                     on the extent of the imported dataset
        :param memory: Cache size for raster rows, used in r.in.gdal
    """

    global raise_on_error
    old_state = gscript.raise_on_error
    gscript.set_raise_on_error(True)

    # Check if the input file and extraction directory exist
    if not os.path.exists(input):
        gscript.fatal(
            _("Space time raster dataset archive <%s> not found") % input)
    if not create and not os.path.exists(directory):
        gscript.fatal(_("Extraction directory <%s> not found") % directory)

    tar = tarfile.open(name=input, mode='r')

    # Check for important files
    msgr = get_tgis_message_interface()
    msgr.message(
        _("Checking validity of input file (size: %0.1f MB). Make take a while..."
          % (os.path.getsize(input) / (1024 * 1024.0))))
    members = tar.getnames()
    # Make sure that the basenames of the files are used for comparison
    member_basenames = [os.path.basename(name) for name in members]

    if init_file_name not in member_basenames:
        gscript.fatal(_("Unable to find init file <%s>") % init_file_name)
    if list_file_name not in member_basenames:
        gscript.fatal(_("Unable to find list file <%s>") % list_file_name)
    if proj_file_name not in member_basenames:
        gscript.fatal(
            _("Unable to find projection file <%s>") % proj_file_name)

    msgr.message(_("Extracting data..."))
    tar.extractall(path=directory)
    tar.close()

    # We use a new list file name for map registration
    new_list_file_name = list_file_name + "_new"
    # Save current working directory path
    old_cwd = os.getcwd()

    # Switch into the data directory
    os.chdir(directory)

    # Check projection information
    if not location:
        temp_name = gscript.tempfile()
        temp_file = open(temp_name, "w")
        proj_name = os.path.abspath(proj_file_name)

        # We need to convert projection strings generated
        # by programs other than g.proj into a newline-separated
        # format so that the GRASS file comparison function
        # can be used to compare the projections
        proj_name_tmp = temp_name + "_in_projection"
        proj_file = open(proj_name, "r")
        proj_content = proj_file.read()
        proj_content = proj_content.replace(" +", "\n+")
        proj_content = proj_content.replace("\t+", "\n+")
        proj_file.close()

        proj_file = open(proj_name_tmp, "w")
        proj_file.write(proj_content)
        proj_file.close()

        p = gscript.start_command("g.proj", flags="j", stdout=temp_file)
        p.communicate()
        temp_file.close()

        if not gscript.compare_key_value_text_files(
                temp_name, proj_name_tmp, sep="="):
            if overr:
                gscript.warning(
                    _("Projection information does not match. "
                      "Proceeding..."))
            else:
                diff = ''.join(gscript.diff_files(temp_name, proj_name))
                gscript.warning(
                    _("Difference between PROJ_INFO file of "
                      "imported map and of current location:"
                      "\n{diff}").format(diff=diff))
                gscript.fatal(
                    _("Projection information does not match. "
                      "Aborting."))

    # Create a new location based on the projection information and switch
    # into it
    old_env = gscript.gisenv()
    if location:
        try:
            with open(proj_file_name, 'r') as proj_file:
                proj4_string = proj_file.read()
            gscript.create_location(dbase=old_env["GISDBASE"],
                                    location=location,
                                    proj4=proj4_string)
            # Just create a new location and return
            if create:
                os.chdir(old_cwd)
                return
        except Exception as e:
            gscript.fatal(
                _("Unable to create location %(l)s. Reason: %(e)s") % {
                    'l': location,
                    'e': str(e)
                })
        # Switch to the newly created location
        try:
            gscript.run_command("g.mapset",
                                mapset="PERMANENT",
                                location=location,
                                dbase=old_env["GISDBASE"])
        except CalledModuleError:
            gscript.fatal(_("Unable to switch to location %s") % location)
        # create default database connection
        try:
            gscript.run_command("t.connect", flags="d")
        except CalledModuleError:
            gscript.fatal(
                _("Unable to create default temporal database "
                  "in new location %s") % location)

    try:
        # Make sure the temporal database exists
        factory.init()

        fs = "|"
        maplist = []
        mapset = get_current_mapset()
        list_file = open(list_file_name, "r")
        new_list_file = open(new_list_file_name, "w")

        # get number of lines to correctly form the suffix
        max_count = sum(1 for _ in list_file)
        list_file.seek(0)

        # Read the map list from file
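        # Each line has the form "filename[:layer]|start_time|end_time"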
        line_count = 0
        while True:
            line = list_file.readline()
            if not line:
                break

            line_list = line.split(fs)

            # The filename is actually the base name of the map
            # that must be extended by the file suffix
            filename = line_list[0].strip().split(":")[0]
            if base:
                mapname = "%s_%s" % (
                    base, gscript.get_num_suffix(line_count + 1, max_count))
                mapid = "%s@%s" % (mapname, mapset)
            else:
                mapname = filename
                mapid = mapname + "@" + mapset

            row = {}
            row["filename"] = filename
            row["name"] = mapname
            row["id"] = mapid
            row["start"] = line_list[1].strip()
            row["end"] = line_list[2].strip()

            new_list_file.write("%s%s%s%s%s\n" %
                                (mapname, fs, row["start"], fs, row["end"]))

            maplist.append(row)
            line_count += 1

        list_file.close()
        new_list_file.close()

        # Read the init file
        fs = "="
        init = {}
        init_file = open(init_file_name, "r")
        while True:
            line = init_file.readline()
            if not line:
                break

            kv = line.split(fs)
            init[kv[0]] = kv[1].strip()

        init_file.close()

        if "temporal_type" not in init or \
           "semantic_type" not in init or \
           "number_of_maps" not in init:
            gscript.fatal(
                _("Key words %(t)s, %(s)s or %(n)s not found in init"
                  " file.") % {
                      't': "temporal_type",
                      's': "semantic_type",
                      'n': "number_of_maps"
                  })

        if line_count != int(init["number_of_maps"]):
            gscript.fatal(_("Number of maps mismatch in init and list file."))

        format_ = "GTiff"
        type_ = "strds"

        if "stds_type" in init:
            type_ = init["stds_type"]
        if "format" in init:
            format_ = init["format"]

        if stds_type != type_:
            gscript.fatal(
                _("The archive file is of the wrong space time dataset"
                  " type"))

        # Check the existence of the files
        if format_ == "GTiff":
            for row in maplist:
                filename = row["filename"] + ".tif"
                if not os.path.exists(filename):
                    gscript.fatal(
                        _("Unable to find GeoTIFF raster file "
                          "<%s> in archive.") % filename)
        elif format_ == "AAIGrid":
            for row in maplist:
                filename = row["filename"] + ".asc"
                if not os.path.exists(filename):
                    gscript.fatal(
                        _("Unable to find AAIGrid raster file "
                          "<%s> in archive.") % filename)
        elif format_ == "GML":
            for row in maplist:
                filename = row["filename"] + ".xml"
                if not os.path.exists(filename):
                    gscript.fatal(
                        _("Unable to find GML vector file "
                          "<%s> in archive.") % filename)
        elif format_ == "pack":
            for row in maplist:
                if type_ == "stvds":
                    filename = str(row["filename"].split(":")[0]) + ".pack"
                else:
                    filename = row["filename"] + ".pack"
                if not os.path.exists(filename):
                    gscript.fatal(
                        _("Unable to find GRASS package file "
                          "<%s> in archive.") % filename)
        else:
            gscript.fatal(_("Unsupported input format"))

        # Check the space time dataset
        id = output + "@" + mapset
        sp = dataset_factory(type_, id)
        if sp.is_in_db() and gscript.overwrite() is False:
            gscript.fatal(
                _("Space time %(t)s dataset <%(sp)s> is already in"
                  " the database. Use the overwrite flag.") % {
                      't': type_,
                      'sp': sp.get_id()
                  })

        # Import the maps
        if type_ == "strds":
            if format_ == "GTiff" or format_ == "AAIGrid":
                _import_raster_maps_from_gdal(maplist, overr, exp, location,
                                              link, format_,
                                              set_current_region, memory)
            if format_ == "pack":
                _import_raster_maps(maplist, set_current_region)
        elif type_ == "stvds":
            if format_ == "GML":
                _import_vector_maps_from_gml(maplist, overr, exp, location,
                                             link)
            if format_ == "pack":
                _import_vector_maps(maplist)

        # Create the space time dataset
        if sp.is_in_db() and gscript.overwrite() is True:
            gscript.info(
                _("Overwrite space time %(sp)s dataset "
                  "<%(id)s> and unregister all maps.") % {
                      'sp': sp.get_new_map_instance(None).get_type(),
                      'id': sp.get_id()
                  })
            sp.delete()
            sp = sp.get_new_instance(id)

        temporal_type = init["temporal_type"]
        semantic_type = init["semantic_type"]
        relative_time_unit = None
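        # relative-time datasets additionally need a time unit (e.g. "days")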
        if temporal_type == "relative":
            if "relative_time_unit" not in init:
                gscript.fatal(
                    _("Key word %s not found in init file.") %
                    ("relative_time_unit"))
            relative_time_unit = init["relative_time_unit"]
            sp.set_relative_time_unit(relative_time_unit)

        gscript.verbose(
            _("Create space time %s dataset.") %
            sp.get_new_map_instance(None).get_type())

        sp.set_initial_values(temporal_type=temporal_type,
                              semantic_type=semantic_type,
                              title=title,
                              description=descr)
        sp.insert()

        # register the maps
        fs = "|"
        register_maps_in_space_time_dataset(
            type=sp.get_new_map_instance(None).get_type(),
            name=output,
            file=new_list_file_name,
            start="file",
            end="file",
            unit=relative_time_unit,
            dbif=None,
            fs=fs,
            update_cmd_list=False)

        os.chdir(old_cwd)
    # Make sure the location is switched back correctly
    finally:
        if location:
            # Switch to the old location
            try:
                gscript.run_command("g.mapset",
                                    mapset=old_env["MAPSET"],
                                    location=old_env["LOCATION_NAME"],
                                    gisdbase=old_env["GISDBASE"])
            except CalledModuleError:
                gscript.warning(_("Switching to original location failed"))

        gscript.set_raise_on_error(old_state)
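
A minimal usage sketch, assuming the enclosing function is GRASS's
import_stds (its def line lies above this excerpt); the archive path, output
name, and extraction directory are hypothetical, and the call relies on the
module-level imports seen above (os, tarfile, gscript, the tgis helpers).

# Hypothetical call: import a packed STRDS archive into the current location.
import_stds(
    input="/tmp/precipitation_daily.tar.gz",  # assumed archive path
    output="precipitation_daily",             # name of the new STRDS
    directory="/tmp/stds_extract",            # must already exist
    title="Daily precipitation",
    descr="Imported from a t.rast.export archive",
    stds_type="strds",
    set_current_region=True,
)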
Ejemplo n.º 40
0
    def import_cloud_masks(self, area_threshold, prob_threshold, output,
                           shadows, reproject):
        # GRASS_OVERWRITE is set to "1" when the --overwrite flag is given
        overwrite = os.environ.get("GRASS_OVERWRITE") == "1"

        # Import cloud masks for L2A products
        files_L2A = self._filter("MSK_CLDPRB_20m.jp2")

        for f in files_L2A:
            safe_dir = os.path.dirname(f).split(os.path.sep)[-4]
            items = safe_dir.split("_")

            # Define names of final & temporary maps
            map_name = "_".join([items[5], items[2], "MSK", "CLOUDS"])
            clouds_imported = "_".join([items[5], items[2], "cloudprob"])
            clouds_selected = "_".join([items[5], items[2], "clouds_selected"])
            shadows_imported = "_".join([items[5], items[2], "shadows"])
            shadows_selected = "_".join(
                [items[5], items[2], "shadows_selected"])
            mask_selected = "_".join([items[5], items[2], "mask_selected"])
            mask_cleaned = "_".join([items[5], items[2], "mask_cleaned"])

            self._map_list.extend([
                clouds_imported,
                clouds_selected,
                shadows_imported,
                shadows_selected,
                mask_selected,
                mask_cleaned,
            ])

            # check if mask already exists
            if gs.find_file(name=map_name,
                            element=output)["file"] and not overwrite:
                gs.message(
                    _("option <output>: <{}> exists. To overwrite, use the --overwrite flag"
                      ).format(map_name))
                continue

            try:
                # Import & Threshold cloud probability layer
                gs.message(
                    _("Importing cloud mask for {}").format("_".join(
                        [items[5], items[2]])))
                if reproject:
                    self._args["resolution_value"] = self._raster_resolution(f)
                    self._args["resample"] = "bilinear"
                    gs.run_command("r.import",
                                   input=f,
                                   output=clouds_imported,
                                   **self._args)
                else:
                    gs.run_command("r.in.gdal",
                                   input=f,
                                   output=clouds_imported,
                                   **self._args)
                gs.use_temp_region()
                gs.run_command("g.region", raster=clouds_imported)
                gs.mapcalc(
                    f"{clouds_selected} = if({clouds_imported} >= {prob_threshold}, 1, 0)"
                )
                gs.del_temp_region()

                # Add shadow mask
                if shadows:
                    try:
                        shadow_file = self._filter("_".join(
                            [items[5], items[2], "SCL_20m.jp2"]))
                        if reproject:
                            self._args[
                                "resolution_value"] = self._raster_resolution(
                                    shadow_file[0])
                            self._args["resample"] = "nearest"
                            gs.run_command(
                                "r.import",
                                input=shadow_file[0],
                                output=shadows_imported,
                                **self._args,
                            )
                        else:
                            gs.run_command(
                                "r.in.gdal",
                                input=shadow_file[0],
                                output=shadows_imported,
                                **self._args,
                            )

                        gs.use_temp_region()
                        gs.run_command("g.region", raster=shadows_imported)
                        gs.mapcalc(
                            f"{shadows_selected} = if({shadows_imported} == 3, 2, 0)"
                        )
                        gs.mapcalc(
                            f"{mask_selected} = max({shadows_selected},{clouds_selected})"
                        )
                        gs.del_temp_region()
                    except Exception as e:
                        gs.warning(
                            _("Unable to import shadows for {}. Error: {}").
                            format("_".join([items[5], items[2]]), e))

                else:
                    gs.run_command("g.rename",
                                   quiet=True,
                                   raster=(clouds_selected, mask_selected))

                gs.use_temp_region()
                gs.run_command("g.region", raster=mask_selected)

                # Cleaning small patches
                try:
                    gs.run_command(
                        "r.reclass.area",
                        input=mask_selected,
                        output=mask_cleaned,
                        value=area_threshold,
                        mode="greater",
                    )
                except Exception:
                    pass  # error already printed

                # Extract & Label clouds (and shadows)
                gs.run_command("r.null", map=mask_cleaned, setnull="0")

                labels = ["1:clouds", "2:shadows"]
                labelling = gs.feed_command("r.category",
                                            map=mask_cleaned,
                                            separator=":",
                                            rules="-")
                labelling.stdin.write("\n".join(labels).encode())
                labelling.stdin.close()
                labelling.wait()

                info_stats = gs.parse_command("r.stats",
                                              input=mask_cleaned,
                                              flags="p")

                # Create final cloud (and shadow) mask & display areal statistics
                if output == "vector":
                    gs.run_command(
                        "r.to.vect",
                        input=mask_cleaned,
                        output=map_name,
                        type="area",
                        flags="s",
                    )
                    colours = ["1 230:230:230", "2 60:60:60"]
                    colourise = gs.feed_command(
                        "v.colors",
                        map=map_name,
                        use="attr",
                        column="value",
                        rules="-",
                        quiet=True,
                    )
                    colourise.stdin.write("\n".join(colours).encode())
                    colourise.stdin.close()
                    colourise.wait()
                    gs.vector_history(map_name)

                else:
                    gs.run_command("g.rename",
                                   quiet=True,
                                   raster=(mask_cleaned, map_name))
                    colours = ["1 230:230:230", "2 60:60:60"]
                    colourise = gs.feed_command("r.colors",
                                                map=map_name,
                                                rules="-",
                                                quiet=True)
                    colourise.stdin.write("\n".join(colours).encode())
                    colourise.stdin.close()
                    colourise.wait()
                    gs.raster_history(map_name)

                gs.message(
                    _("Areal proportion of masked clouds:{}").format(
                        [key.split()[1] for key in info_stats][0]))
                if shadows:
                    if len(info_stats) > 2:
                        gs.message(
                            _("Areal proportion of masked shadows:{}").format(
                                [key.split()[1] for key in info_stats][1]))
                    else:
                        gs.message(_("Areal proportion of masked shadows:0%"))

                gs.del_temp_region()

            except Exception as e:
                gs.del_temp_region()
                gs.warning(
                    _("Unable to import cloud mask for {}. Error: {}").format(
                        "_".join([items[5], items[2]]), e))

        # Import of simplified cloud masks for Level-1C products
        all_files = self._filter("MSK_CLOUDS_B00.gml")
        files_L1C = []

        for f in all_files:
            safe_dir = os.path.dirname(f).split(os.path.sep)[-4]
            if safe_dir not in [
                    os.path.dirname(file).split(os.path.sep)[-4]
                    for file in files_L2A
            ]:
                files_L1C.append(f)

        if len(files_L1C) > 0:
            from osgeo import ogr

            for f in files_L1C:
                safe_dir = os.path.dirname(f).split(os.path.sep)[-4]
                items = safe_dir.split("_")

                # Define names of final & temporary maps
                map_name = "_".join([items[5], items[2], "MSK", "CLOUDS"])
                clouds_imported = "_".join(
                    [items[5], items[2], "clouds_imported"])
                mask_cleaned = "_".join([items[5], items[2], "mask_cleaned"])
                self._map_list.extend([clouds_imported, mask_cleaned])

                # check if mask already exists
                if (gs.find_file(name=map_name, element=output)["file"]
                        and not overwrite):
                    gs.message(
                        _("option <output>: <{}> exists. To overwrite, use the --overwrite flag"
                          ).format(map_name))
                    continue

                # check if any OGR layer
                dsn = ogr.Open(f)
                layer_count = dsn.GetLayerCount()
                dsn.Destroy()
                if layer_count < 1:
                    gs.info(
                        "No clouds layer found in <{}>. Import skipped".format(
                            f))
                    continue

                # Import cloud layer
                try:
                    gs.message(
                        _("Importing cloud mask for {}").format("_".join(
                            [items[5], items[2]])))
                    gs.run_command(
                        "v.import",
                        input=f,
                        output=clouds_imported,
                        extent=options["extent"],
                        flags="o" if flags["o"] else None,
                        quiet=True,
                    )

                    gs.use_temp_region()
                    gs.run_command("g.region", vector=clouds_imported)

                    # Cleaning small patches
                    gs.run_command(
                        "v.clean",
                        input=clouds_imported,
                        output=mask_cleaned,
                        tool="rmarea",
                        threshold=area_threshold,
                    )

                    # Calculating info stats
                    gs.run_command("v.db.addcolumn",
                                   map=mask_cleaned,
                                   columns="value_num INT")
                    gs.run_command("v.db.update",
                                   map=mask_cleaned,
                                   column="value_num",
                                   value=1)
                    gs.run_command(
                        "v.to.rast",
                        input=mask_cleaned,
                        output=mask_cleaned,
                        use="attr",
                        attribute_column="value_num",
                        quiet=True,
                    )
                    info_stats = gs.parse_command("r.stats",
                                                  input=mask_cleaned,
                                                  flags="p")

                    # Create final cloud mask output
                    if output == "vector":
                        gs.run_command("g.rename",
                                       quiet=True,
                                       vector=(mask_cleaned, map_name))
                        colours = ["1 230:230:230"]
                        colourise = gs.feed_command(
                            "v.colors",
                            map=map_name,
                            use="attr",
                            column="value_num",
                            rules="-",
                            quiet=True,
                        )
                        colourise.stdin.write("\n".join(colours).encode())
                        colourise.stdin.close()
                        colourise.wait()
                        gs.vector_history(map_name)

                    else:
                        gs.run_command("g.rename",
                                       quiet=True,
                                       raster=(mask_cleaned, map_name))
                        colours = ["1 230:230:230"]
                        colourise = gs.feed_command("r.colors",
                                                    map=map_name,
                                                    rules="-",
                                                    quiet=True)
                        colourise.stdin.write("\n".join(colours).encode())
                        colourise.stdin.close()
                        colourise.wait()
                        gs.raster_history(map_name)

                    # Display messages & statistics
                    if shadows:
                        gs.warning(
                            _("Level1 product: No shadow mask for {}").format(
                                "_".join([items[5], items[2]])))

                    gs.message(
                        _("Areal proportion of masked clouds:{}").format(
                            [key.split()[1] for key in info_stats][0]))

                    gs.del_temp_region()

                except Exception as e:
                    gs.del_temp_region()
                    gs.warning(
                        _("Unable to import cloud mask for {}. Error: {}").
                        format("_".join([items[5], items[2]]), e))
Ejemplo n.º 41
0
def main():
    '''Read options and flags, then run the RUSLE soil loss workflow.'''

    global soillossbare
    global outprefix
    global flowaccmethod
    global elevation
    global kfactor
    global rfactor
    global resolution

    global flowacc
    global fieldblock
    global fieldblockvect

    global flag_r

    global quiet
    global options
    global flags

    soillossbare = options['soillossbare']

    outprefix = soillossbare + '_'
    flowaccmethod = options['flowaccmethod']
    flowacc = options['flowacc']
    elevation = options['elevation']
    kfactor = options['kfactor']
    rfactor = options['rfactor']
    resolution = options['resolution']
    #fieldblock = None
    fieldblock = options['fieldblock']
    fieldblockvect = options['map']

    flag_r = flags['r']     # remove temp maps

    quiet = True
    if gscript.verbosity() > 2:
        quiet = False

    gscript.run_command("g.region", flags="a", res=resolution)

    if flowacc:
        gscript.info("Using flowaccumulation from input raster map %s ..." %flowacc)
        ruslealg = rusle_base()

    elif flowaccmethod == "r.terraflow":
        gscript.info("Using flowaccumulation from module r.terraflow (MFD) ...")
        ruslealg = rusle_terraflow()

    elif flowaccmethod == "r.flow":
        gscript.info("Using flowaccumulation from module r.flow (SFD) ...")
        ruslealg = rusle_flow()

    elif flowaccmethod == "r.watershed":
        gscript.info("Using flowaccumulation from module r.watershed (MFD) ...")
        ruslealg = rusle_watershed()

    ruslealg.rusle()

    if flag_r:
        #remove = ruslealg.removeTempRasters() # already in destructor
        pass

    return
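
The if/elif dispatch above could equally be table-driven; a small sketch,
assuming the rusle_* classes and gscript are available as in the module above
(a design alternative, not the module's actual code):

# Sketch: table-driven selection of the flow accumulation backend,
# equivalent to the if/elif chain in main().
FLOWACC_BACKENDS = {
    "r.terraflow": rusle_terraflow,  # MFD
    "r.flow": rusle_flow,            # SFD
    "r.watershed": rusle_watershed,  # MFD
}

def make_rusle(flowacc, flowaccmethod):
    if flowacc:
        return rusle_base()  # precomputed flow accumulation supplied
    try:
        return FLOWACC_BACKENDS[flowaccmethod]()
    except KeyError:
        gscript.fatal("Unknown flowaccmethod <%s>" % flowaccmethod)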