Example No. 1
def manage_map_band_reference(name, band_ref):
    """Manage band reference assigned to a single raster map

    :param str name: raster map name
    :param str band_ref: band reference (None for dissociating band reference)

    :return int: return code
    """
    from grass.pygrass.raster import RasterRow

    try:
        with RasterRow(name) as rast:
            if band_ref:
                gs.debug(
                    _("Band reference <{}> assigned to raster map <{}>").format(
                        band_ref, name
                    ),
                    1,
                )
            else:
                gs.debug(
                    _("Band reference dissociated from raster map <{}>").format(name), 1
                )
            try:
                rast.info.band_reference = band_ref
            except GrassError as e:
                gs.error(_("Unable to assign/dissociate band reference. {}").format(e))
                return 1
    except OpenError:
        gs.error(_("Map <{}> not found in current mapset").format(name))
        return 1

    return 0
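
A minimal usage sketch, assuming a GRASS session with grass.script imported as gs; the raster map name lsat7_2002_10 is hypothetical:

# Hypothetical usage inside a GRASS session; the raster name is an
# example, not part of the original module.
if manage_map_band_reference("lsat7_2002_10", "L8_1") != 0:
    gs.warning("Could not assign band reference")

# Passing None dissociates any previously assigned band reference:
manage_map_band_reference("lsat7_2002_10", None)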
Example No. 2
def import_fields(res=None, tile=None):
    """Import requested fields for a given tile or resolution."""

    # parse needed options
    fields = grass_str_list(options['fields'])
    layers = grass_int_list(options['layers'])

    # compute region extents
    region = region_extents(res=res, tile=tile)

    # for each of the requested fields
    for field in fields:

        # alt is a special case since there is only one layer
        if field == 'alt':
            import_layer('alt', region, res=res, tile=tile)

        # bio is a bit of a special case too since there are 18 layers
        elif field == 'bio':
            for layer in (layers if layers else range(1, 19)):
                import_layer(field, region, layer=layer, res=res, tile=tile)

        # other fields have 12 layers
        else:
            for layer in (layers if layers else range(1, 13)):
                if layer > 12:
                    grass.error("No layer '%d' for field '%s'"
                                % (layer, field))
                else:
                    import_layer(field, region,
                                 layer=layer, res=res, tile=tile)
Example No. 3
def main(options, flags):
    xnames = options["input"]
    coef_pref = options["coef_prefix"]
    timevar_pref = options["timevar_prefix"]
    result_pref = options["result_prefix"]
    freq = options["freq"]
    freq = [float(f) for f in freq.split(",")]

    xnames = xnames.split(",")

    N = len(xnames)
    if len(freq) >= (N - 1) / 2:
        grass.error(
            "Count of used harmonics is too large. Reduce the parameter.")
        sys.exit(1)

    const_name, time_names, harm_names = generate_vars(N, freq, timevar_pref)

    settings_name = uuid.uuid4().hex
    settings = open(settings_name, "w")
    _generate_sample_descr(settings, freq, xnames, const_name, time_names,
                           harm_names)
    settings.close()
    regression(settings_name, coef_pref)
    inverse_transform(settings_name, coef_pref, result_pref)
    os.unlink(settings_name)
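
The settings file above is created under a random name in the current directory; a safer variant (a sketch, not the original code) would let the tempfile module pick the location:

import os
import tempfile

# Write the settings to a file in the system temp directory instead of
# the current working directory; delete=False keeps it until unlink().
with tempfile.NamedTemporaryFile("w", delete=False) as settings:
    settings_name = settings.name
    _generate_sample_descr(settings, freq, xnames, const_name,
                           time_names, harm_names)
regression(settings_name, coef_pref)
inverse_transform(settings_name, coef_pref, result_pref)
os.unlink(settings_name)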
Example No. 4
    def __init__(self, headers, Y, X, prefix, restype='FCELL'):
        """Linear Least Square model  Y = X * b + e
        X are [[],.., []] of raster names
        Y are [] of raster names

        b are rasters of the regression coeficients, the rasters
            have type restype
        e are the errors of the model (the class doesn't compute e)

        header is [] of the variable names.
        names of the 'b' rasters are constructed as
            prefix+variable_name (see header)
        """
        if len(set(headers)) != len(headers):
            grass.error('The names of the variables are not unique!')

        self.mtype = restype

        self.x_headers = headers[1:]  # Names of the coefficient
        self.b_names = [prefix + name for name in self.x_headers]
        self.y_names = Y  # Names of Y rasters
        self.x_names = X  # Names of X rasters

        self.sample_count = len(self.y_names)
        self.factor_count = len(self.x_names[0])

        self._y_rasters = []
        self._x_rasters = []
        self._b_rasters = []
        self._init_rasters()
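
A construction sketch; the class name LLSModel and all raster names are hypothetical. Note that the first entry of headers names the dependent variable, so only headers[1:] become coefficient names:

# Hypothetical usage; class and raster names are illustrative only.
headers = ['y', 'const', 'elev']          # headers[1:] -> coefficient names
Y = ['ndvi_2001', 'ndvi_2002']            # one Y raster per sample
X = [['ones', 'elev'], ['ones', 'elev']]  # factor rasters per sample
model = LLSModel(headers, Y, X, prefix='coef.', restype='DCELL')
# The coefficient rasters would be named coef.const and coef.elev.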
Example No. 5
    def _read_timestamp_from_mtd_file(mtd_file):
        try:
            from xml.etree import ElementTree
            from datetime import datetime
        except ImportError as e:
            gs.fatal(_("Unable to parse metadata file. {}").format(e))

        timestamp = None
        with io.open(mtd_file, encoding='utf-8') as fd:
            root = ElementTree.fromstring(fd.read())
            nsPrefix = root.tag[:root.tag.index('}') + 1]
            nsDict = {'n1': nsPrefix[1:-1]}
            node = root.find('n1:General_Info', nsDict)
            if node is not None:
                try:
                    # check S2
                    is_s2 = node.find('TILE_ID', nsDict).text.startswith('S2')
                    if not is_s2:
                        gs.fatal(
                            _("Register file can be created only for Sentinel-2 data."
                              ))

                    # get timestamp
                    ts_str = node.find('SENSING_TIME', nsDict).text
                    timestamp = datetime.strptime(ts_str,
                                                  "%Y-%m-%dT%H:%M:%S.%fZ")
                except AttributeError:
                    # error is reported below
                    pass

        if not timestamp:
            gs.error(
                _("Unable to determine timestamp from <{}>").format(mtd_file))

        return timestamp
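
A usage sketch; the metadata file name MTD_TL.xml is an example path, not taken from the original module:

# Hypothetical call; returns a datetime on success, None on failure.
ts = _read_timestamp_from_mtd_file("MTD_TL.xml")
if ts:
    gs.message("Sensing time: {}".format(ts.isoformat()))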
Example No. 6
def main():
    options, flags = grass.parser()
    bt1 = options['ainput']
    bt2 = options['binput']
    basename = options['basename']
    output = basename + '_lswt'
    satellite = options['satellite']
    c0 = coeffs.get(satellite).get('c0')[0]
    c1 = coeffs.get(satellite).get('c1')[0]
    c2 = coeffs.get(satellite).get('c2')[0]
    coeff = flags['i']
    if coeff:
        grass.message("Split window coefficients for {satellite} are "
                      "c0={c0};c1={c1};c2={c2}".format(satellite=satellite,
                                                       c0=c0,
                                                       c1=c1,
                                                       c2=c2))
        return
    elif not (bt1 and bt2 and basename):
        # logging.error('error: ', message)
        grass.error('in1, in2 and basename are required for computing lswt')
    else:
        grass.message("Computing water surface temperature - Remember to set"
                      " water mask: Output file is {basename}_lswt".format(
                          basename=basename))
        # Split window equation for water surface
        grass.mapcalc(exp="{out} = {bt1} + {c1} * ({bt1} - {bt2}) + {c2} *"
                      " ({bt1} - {bt2})^2 + {c0}".format(
                          out=output, bt1=bt1, bt2=bt2, c0=c0, c1=c1, c2=c2))
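
The split-window coefficients are read from a module-level coeffs dictionary that the snippet does not show; its assumed shape, with placeholder numbers only:

# Assumed structure of the coefficient table; the values below are
# placeholders, not published split-window coefficients.
coeffs = {
    'landsat8': {'c0': [0.0], 'c1': [1.0], 'c2': [0.0]},
}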
Example No. 7
    def __init__(self, headers, Y, X, prefix, restype='FCELL'):
        """Linear Least Square model  Y = X * b + e
        X are [[],.., []] of raster names
        Y are [] of raster names

        b are rasters of the regression coeficients, the rasters
            have type restype
        e are the errors of the model (the class doesn't compute e)

        header is [] of the variable names.
        names of the 'b' rasters are constructed as
            prefix+variable_name (see header)
        """
        if len(set(headers)) != len(headers):
            grass.error('The names of the variables are not unique!')

        self.mtype = restype

        self.x_headers = headers[1:]    # Names of the coefficient
        self.b_names = [prefix + name for name in self.x_headers]
        self.y_names = Y                # Names of Y rasters
        self.x_names = X                # Names of X rasters

        self.sample_count = len(self.y_names)
        self.factor_count = len(self.x_names[0])

        self._y_rasters = []
        self._x_rasters = []
        self._b_rasters = []
        self._init_rasters()
Example No. 8
def get_extensions():
    addon_base = os.getenv("GRASS_ADDON_BASE")
    if not addon_base:
        gscript.fatal(_("%s not defined") % "GRASS_ADDON_BASE")
    fXML = os.path.join(addon_base, "modules.xml")
    if not os.path.exists(fXML):
        return []

    # read XML file
    fo = open(fXML, "r")
    try:
        tree = etree.fromstring(fo.read())
    except Exception as e:
        gscript.error(_("Unable to parse metadata file: %s") % e)
        fo.close()
        return []

    fo.close()

    libgis_rev = gscript.version()["libgis_revision"]
    ret = list()
    for tnode in tree.findall("task"):
        gnode = tnode.find("libgis")
        if gnode is not None and gnode.get("revision", "") != libgis_rev:
            ret.append(tnode.get("name"))

    return ret
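
A usage sketch, assuming GRASS_ADDON_BASE is set and modules.xml exists:

# Hypothetical usage: report addons built against a different libgis
# revision than the running GRASS.
outdated = get_extensions()
if outdated:
    gscript.message(_("Extensions to rebuild: %s") % ", ".join(outdated))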
Example No. 9
def get_extensions():
    addon_base = os.getenv('GRASS_ADDON_BASE')
    if not addon_base:
        grass.fatal(_("%s not defined") % "GRASS_ADDON_BASE")
    fXML = os.path.join(addon_base, 'modules.xml')
    if not os.path.exists(fXML):
        return []

    # read XML file
    fo = open(fXML, 'r')
    try:
        tree = etree.fromstring(fo.read())
    except Exception as e:  # StandardError existed only in Python 2
        grass.error(_("Unable to parse metadata file: %s") % e)
        fo.close()
        return []
    
    fo.close()
    
    libgis_rev = grass.version()['libgis_revision']
    ret = list()
    for tnode in tree.findall('task'):
        gnode = tnode.find('libgis')
        if gnode is not None and \
                gnode.get('revision', '') != libgis_rev:
            ret.append(tnode.get('name'))
    
    return ret
Example No. 10
def manage_map_semantic_label(name, semantic_label):
    """Manage semantic label assigned to a single raster map

    :param str name: raster map name
    :param str semantic_label: semantic label (None for dissociating semantic label)

    :return int: return code
    """
    from grass.pygrass.raster import RasterRow

    try:
        with RasterRow(name) as rast:
            if semantic_label:
                gs.debug(
                    _("Semantic label <{}> assigned to raster map <{}>").format(
                        semantic_label, name
                    ),
                    1,
                )
            else:
                gs.debug(
                    _("Semantic label dissociated from raster map <{}>").format(name), 1
                )
            try:
                rast.info.semantic_label = semantic_label
            except GrassError as e:
                gs.error(_("Unable to assign/dissociate semantic label. {}").format(e))
                return 1
    except OpenError:
        gs.error(_("Map <{}> not found in current mapset").format(name))
        return 1

    return 0
Example No. 11
    def download(self):
        """Download Landsat-8 scenes."""
        for scene in self._scenes:
            try:
                fname = scene.download(key=options['file_key'],
                                       path=options['output'],
                                       overwrite=True)[options['file_key']]
                message(str(fname) + "... Done")
            except Exception:
                # fname may be unbound if download() itself failed,
                # so report the scene instead
                error(str(scene) + "... Failed")
Example No. 12
def checkPath(path):
    if os.path.exists(path):
        return 0
    else:
        try:
            os.mkdir(path)
            return 0
        except OSError:
            grass.error(_("Unable to create path '{st}'").format(st=path))
            return 1
Example No. 13
def checkPath(path):
    if os.path.exists(path):
        return 0
    else:
        try:
            os.mkdir(path)
            return 0
        except OSError:
            grass.error(_("Unable to create path '{st}'").format(st=path))
            return 1
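
A more robust variant (a sketch, not the original function) would delegate the existence check to os.makedirs and report the actual failure:

import os

def check_path(path):
    """Create path if missing; return 0 on success, 1 on failure."""
    try:
        os.makedirs(path, exist_ok=True)  # no-op when path already exists
        return 0
    except OSError as e:
        grass.error(_("Unable to create path '{st}': {err}")
                    .format(st=path, err=e))
        return 1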
Example No. 14
    def printMessage(self, message, type='info'):
        """Call grass message function corresponding to type."""
        if type == 'error':
            grass.error(message)
        elif type == 'warning':
            grass.warning(message)
        elif type == 'info' and grass.gisenv()['GRASS_VERBOSE'] > 0:
            grass.info(message)
        if self.logOutput is True:
            self.__writeLog(message)
Example No. 15
def main():
    """Main function, called at execution time."""

    # parse options to import layers
    variables = options['variables'].split(',')
    if options['bioclim']:
        bioclim = list(map(int, options['bioclim'].split(',')))
        if not all(1 <= x <= 19 for x in bioclim):
            grass.warning("Values for 'bioclim' need to be within the "
                          "range 1-19. Ignoring values outside this range")
    else:
        bioclim = range(1, 20)

    if options['months']:
        months = list(map(int, options['months'].split(',')))
        if not all(1 <= x <= 12 for x in months):
            grass.warning("Values for 'months' need to be within the range"
                          " 1-12. Ignoring values outside this range")
    else:
        months = range(1, 13)

    allres = options['res'].split(',')

    # import tiles
    if options['tiles']:
        tiles = options['tiles'].split(',')
        legaltiles = [str(j) + str(i) for j in range(5) for i in range(12)]
        for t in tiles:
            if t not in legaltiles:
                grass.error("Tile {} is not a valid WorldClim tile, see "
                            "http://www.worldclim.org/tiles.php".format(t))
        for tile in tiles:
            import_variables(tile=tile,
                             variables=variables,
                             bioclim=bioclim,
                             months=months)

        # Merge tiles
        if not flags['p']:
            merge_tiles(variables=variables,
                        tiles=tiles,
                        bioclim=bioclim,
                        months=months)

    # import global datasets
    if allres != ['']:
        for res in allres:
            import_variables(res=res,
                             variables=variables,
                             bioclim=bioclim,
                             months=months)
Example No. 16
def main():
    map = options['map']
    layer = options['layer']
    columns = options['columns']
    columns = [col.strip() for col in columns.split(',')]

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    exists = bool(
        grass.find_file(map, element='vector', mapset=mapset)['file'])

    if not exists:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    try:
        f = grass.vector_db(map)[int(layer)]
    except KeyError:
        grass.fatal(
            _("There is no table connected to this map. Run v.db.connect or v.db.addtable first."
              ))

    table = f['table']
    database = f['database']
    driver = f['driver']
    column_existing = grass.vector_columns(map, int(layer)).keys()

    for col in columns:
        if not col:
            grass.fatal(
                _("There is an empty column. Did you leave a trailing comma?"))
        col_name = col.split(' ')[0].strip()
        if col_name in column_existing:
            grass.error(
                _("Column <%s> is already in the table. Skipping.") % col_name)
            continue
        grass.verbose(_("Adding column <%s> to the table") % col_name)
        p = grass.feed_command('db.execute',
                               input='-',
                               database=database,
                               driver=driver)
        res = "ALTER TABLE {} ADD COLUMN {}".format(table, col)
        p.stdin.write(encode(res))
        grass.debug(res)
        p.stdin.close()
        if p.wait() != 0:
            grass.fatal(_("Unable to add column <%s>.") % col)

    # write cmd history:
    grass.vector_history(map)
Example No. 17
def get_extensions():
    addon_base = os.getenv('GRASS_ADDON_PATH')
    if not addon_base:
        grass.fatal(_("%s not defined") % "GRASS_ADDON_PATH")
    fXML = os.path.join(addon_base, 'modules.xml')
    if not os.path.exists(fXML):
        return []

    # read XML file
    fo = open(fXML, 'r')
    try:
        tree = etree.fromstring(fo.read())
    except Exception as e:  # Python 3 syntax; StandardError no longer exists
        grass.error(_("Unable to parse metadata file: %s") % e)
        fo.close()
        return []
Example No. 18
def print_map_semantic_label(name, label_reader):
    """Print semantic label information assigned to a single raster map

    :param str name: raster map name
    """
    from grass.pygrass.raster import RasterRow

    try:
        with RasterRow(name) as rast:
            semantic_label = rast.info.semantic_label
            if semantic_label:
                label_reader.print_info(semantic_label)
            else:
                gs.info(_("No semantic label assigned to <{}>").format(name))
    except OpenError:
        gs.error(_("Map <{}> not found").format(name))
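
A usage sketch; SemanticLabelReader is assumed here to be whatever object supplies the print_info() method used above:

# Hypothetical usage; the reader object and map name are illustrative.
label_reader = SemanticLabelReader()  # assumed helper exposing print_info()
print_map_semantic_label("lsat7_2002_10", label_reader)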
Example No. 19
def main():
    remove = options['operation'] == 'remove'
    if remove or flags['f']:
        extensions = gscript.read_command('g.extension', quiet=True,
                                          flags='a').splitlines()
    else:
        extensions = get_extensions()

    if not extensions:
        if remove:
            gscript.info(_("No extension found. Nothing to remove."))
        else:
            gscript.info(
                _("Nothing to rebuild. Rebuilding process can be forced with -f flag."
                  ))
        return 0

    if remove and not flags['f']:
        gscript.message(_("List of extensions to be removed:"))
        print(os.linesep.join(extensions))
        gscript.message(
            _("You must use the force flag (-f) to actually remove them. Exiting."
              ))
        return 0

    for ext in extensions:
        gscript.message('-' * 60)
        if remove:
            gscript.message(_("Removing extension <%s>...") % ext)
        else:
            gscript.message(_("Reinstalling extension <%s>...") % ext)
        gscript.message('-' * 60)
        if remove:
            operation = 'remove'
            operation_flags = 'f'
        else:
            operation = 'add'
            operation_flags = ''
        try:
            gscript.run_command('g.extension',
                                flags=operation_flags,
                                extension=ext,
                                operation=operation)
        except CalledModuleError:
            gscript.error(_("Unable to process extension:%s") % ext)

    return 0
Example No. 20
def main():
    remove = options["operation"] == "remove"
    if remove or flags["f"]:
        extensions = gscript.read_command("g.extension", quiet=True,
                                          flags="a").splitlines()
    else:
        extensions = get_extensions()

    if not extensions:
        if remove:
            gscript.info(_("No extension found. Nothing to remove."))
        else:
            gscript.info(
                _("Nothing to rebuild. Rebuilding process can be forced with -f flag."
                  ))
        return 0

    if remove and not flags["f"]:
        gscript.message(_("List of extensions to be removed:"))
        print(os.linesep.join(extensions))
        gscript.message(
            _("You must use the force flag (-f) to actually remove them. Exiting."
              ))
        return 0

    for ext in find_addon_name(addons=extensions):
        gscript.message("-" * 60)
        if remove:
            gscript.message(_("Removing extension <%s>...") % ext)
        else:
            gscript.message(_("Reinstalling extension <%s>...") % ext)
        gscript.message("-" * 60)
        if remove:
            operation = "remove"
            operation_flags = "f"
        else:
            operation = "add"
            operation_flags = ""
        try:
            gscript.run_command("g.extension",
                                flags=operation_flags,
                                extension=ext,
                                operation=operation)
        except CalledModuleError:
            gscript.error(_("Unable to process extension:%s") % ext)

    return 0
Example No. 21
def main():
    """Main function, called at execution time."""

    # parse options to import layers
    variables = options['variables'].split(',')
    if options['bioclim']:
        bioclim = list(map(int, options['bioclim'].split(',')))
        if not all(1 <= x <= 19 for x in bioclim):
            grass.warning("Values for 'bioclim' need to be within the "
                          "range 1-19. Ignoring values outside this range")
    else:
        bioclim = range(1, 20)

    if options['months']:
        months = list(map(int, options['months'].split(',')))
        if not all(1 <= x <= 12 for x in months):
            grass.warning("Values for 'months' need to be within the range"
                          " 1-12. Ignoring values outside this range")
    else:
        months = range(1, 13)

    allres = options['res'].split(',')

    # import tiles
    if options['tiles']:
        tiles = options['tiles'].split(',')
        legaltiles = [str(j)+str(i) for j in range(5) for i in range(12)]
        for t in tiles:
            if t not in legaltiles:
                grass.error("Tile {} is not a valid WorldClim tile, see "
                            "http://www.worldclim.org/tiles.php"
                            .format(t))
        for tile in tiles:
            import_variables(tile=tile, variables=variables,
                             bioclim=bioclim, months=months)

        # Merge tiles
        if not flags['p']:
            merge_tiles(variables=variables, tiles=tiles, bioclim=bioclim,
                        months=months)

    # import global datasets
    if allres != ['']:
        for res in allres:
            import_variables(res=res, variables=variables,
                             bioclim=bioclim, months=months)
Example No. 22
def print_map_band_reference(name, band_reader):
    """Print band reference information assigned to a single raster map

    :param str name: raster map name
    """
    from grass.pygrass.raster import RasterRow

    try:
        with RasterRow(name) as rast:
            band_ref = rast.info.band_reference
            if band_ref:
                shortcut, band = band_ref.split('_')
                band_reader.print_info(shortcut, band)
            else:
                gs.info(_("No band reference assigned to <{}>").format(name))
    except OpenError:
        gs.error(_("Map <{}> not found").format(name))
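
Note that band_ref.split('_') raises ValueError whenever the reference does not contain exactly one underscore; a defensive variant of that block (a sketch, not the original module) could be:

# Defensive unpacking sketch; split only on the first underscore.
parts = band_ref.split('_', 1)
if len(parts) == 2:
    shortcut, band = parts
    band_reader.print_info(shortcut, band)
else:
    gs.error(_("Invalid band reference <{}>").format(band_ref))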
Example No. 23
def main():
    remove = options['operation'] == 'remove'
    if remove or flags['f']:
        extensions = gscript.read_command(
            'g.extension',
            quiet=True,
            flags='a').splitlines()
    else:
        extensions = get_extensions()

    if not extensions:
        if remove:
            gscript.info(_("No extension found. Nothing to remove."))
        else:
            gscript.info(
                _("Nothing to rebuild. Rebuilding process can be forced with -f flag."))
        return 0

    if remove and not flags['f']:
        gscript.message(_("List of extensions to be removed:"))
        print(os.linesep.join(extensions))
        gscript.message(
            _("You must use the force flag (-f) to actually remove them. Exiting."))
        return 0

    for ext in extensions:
        gscript.message('-' * 60)
        if remove:
            gscript.message(_("Removing extension <%s>...") % ext)
        else:
            gscript.message(_("Reinstalling extension <%s>...") % ext)
        gscript.message('-' * 60)
        if remove:
            operation = 'remove'
            operation_flags = 'f'
        else:
            operation = 'add'
            operation_flags = ''
        try:
            gscript.run_command('g.extension', flags=operation_flags,
                                extension=ext, operation=operation)
        except CalledModuleError:
            gscript.error(_("Unable to process extension:%s") % ext)

    return 0
Example No. 24
def main():
    map = options['map']
    layer = options['layer']
    columns = options['columns']
    columns = [col.strip() for col in columns.split(',')]

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    exists = bool(grass.find_file(map, element='vector', mapset=mapset)['file'])

    if not exists:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    try:
        f = grass.vector_db(map)[int(layer)]
    except KeyError:
        grass.fatal(
            _("There is no table connected to this map. Run v.db.connect or v.db.addtable first."))

    table = f['table']
    database = f['database']
    driver = f['driver']
    column_existing = grass.vector_columns(map, int(layer)).keys()

    for col in columns:
        if not col:
            grass.fatal(_("There is an empty column. Did you leave a trailing comma?"))
        col_name = col.split(' ')[0].strip()
        if col_name in column_existing:
            grass.error(_("Column <%s> is already in the table. Skipping.") % col_name)
            continue
        grass.verbose(_("Adding column <%s> to the table") % col_name)
        p = grass.feed_command('db.execute', input='-', database=database, driver=driver)
        p.stdin.write("ALTER TABLE %s ADD COLUMN %s" % (table, col))
        grass.debug("ALTER TABLE %s ADD COLUMN %s" % (table, col))
        p.stdin.close()
        if p.wait() != 0:
            grass.fatal(_("Unable to add column <%s>.") % col)

    # write cmd history:
    grass.vector_history(map)
Example No. 25
    def set_uuid(self, uuid_list):
        """Set products by uuid.

        TODO: Find better implementation

        :param uuid: uuid to download
        """
        if self._apiname == "USGS_EE":
            self.get_products_from_uuid_usgs(uuid_list)
        else:
            from sentinelsat.sentinel import SentinelAPIError

            self._products_df_sorted = {"uuid": []}
            for uuid in uuid_list:
                try:
                    odata = self._api.get_product_odata(uuid, full=True)
                except SentinelAPIError as e:
                    gs.error(_("{0}. UUID {1} skipped").format(e, uuid))
                    continue

                for k, v in odata.items():
                    if k == "id":
                        k = "uuid"
                    elif k == "Sensing start":
                        k = "beginposition"
                    elif k == "Product type":
                        k = "producttype"
                    elif k == "Cloud cover percentage":
                        k = "cloudcoverpercentage"
                    elif k == "Identifier":
                        k = "identifier"
                    elif k == "Ingestion Date":
                        k = "ingestiondate"
                    elif k == "footprint":
                        pass
                    else:
                        continue
                    if k not in self._products_df_sorted:
                        self._products_df_sorted[k] = []
                    self._products_df_sorted[k].append(v)
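
The key-renaming if/elif chain above can also be written as a lookup table; an equivalent sketch of the loop body:

# Equivalent lookup-table form of the renaming above (a sketch).
KEY_MAP = {
    "id": "uuid",
    "Sensing start": "beginposition",
    "Product type": "producttype",
    "Cloud cover percentage": "cloudcoverpercentage",
    "Identifier": "identifier",
    "Ingestion Date": "ingestiondate",
    "footprint": "footprint",
}
for k, v in odata.items():
    key = KEY_MAP.get(k)
    if key is None:
        continue
    self._products_df_sorted.setdefault(key, []).append(v)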
Example No. 26
    def set_uuid(self, uuid_list):
        """Set products by uuid.

        TODO: Find better implementation

        :param uuid: uuid to download
        """
        if self._apiname == 'USGS_EE':
            self.get_products_from_uuid_usgs(uuid_list)
        else:
            from sentinelsat.sentinel import SentinelAPIError

            self._products_df_sorted = {'uuid': []}
            for uuid in uuid_list:
                try:
                    odata = self._api.get_product_odata(uuid, full=True)
                except SentinelAPIError as e:
                    gs.error(_("{0}. UUID {1} skipped").format(e, uuid))
                    continue

                for k, v in odata.items():
                    if k == 'id':
                        k = 'uuid'
                    elif k == 'Sensing start':
                        k = 'beginposition'
                    elif k == 'Product type':
                        k = 'producttype'
                    elif k == 'Cloud cover percentage':
                        k = 'cloudcoverpercentage'
                    elif k == 'Identifier':
                        k = 'identifier'
                    elif k == 'Ingestion Date':
                        k = 'ingestiondate'
                    elif k == 'footprint':
                        pass
                    else:
                        continue
                    if k not in self._products_df_sorted:
                        self._products_df_sorted[k] = []
                    self._products_df_sorted[k].append(v)
Example No. 27
def main():

    options, flags = gscript.parser()
    input = options['input']
    output = options['output']

    if (output is None or output == ""):
        gscript.error(_("[h.in] ERROR: output is a mandatory parameter."))
        exit()

    # Load HexASCII raster into memory
    hexASCII = HASC()
    try:
        hexASCII.loadFromFile(input)
    except (ValueError, IOError) as ex:
        gscript.error(
            _("[h.in] ERROR: Failed to load raster %s: %s" % (input, ex)))
        exit()

    # Set region (note that it is tricking GRASS to think it is a squared raster)
    gscript.run_command('g.region',
                        rows=hexASCII.nrows,
                        cols=hexASCII.ncols,
                        res=hexASCII.side)

    # Create RasterRow object and iterate trough rows
    newRast = raster.RasterRow(output)
    newRast.open('w', 'FCELL')
    for row in range(0, hexASCII.nrows):
        newRow = Buffer(shape=(1, hexASCII.ncols))  #, dtype=float, order='F')
        for col in range(0, hexASCII.ncols):
            newRow[0, col] = hexASCII.get(col, row)
        gscript.message(_("[h.in] DEBUG: Importing row: %s" % newRow))
        newRast.put_row(newRow)
    gscript.message(_("[h.in] DEBUG: Imported raster: %s" % (newRast)))

    # Close RasterRow to force its creation
    newRast.close()

    gscript.message(_("[h.in] SUCCESS: HexASCII raster imported."))
Example No. 28
def main(options, flags):
    xnames = options['input']
    coef_pref = options['coef_prefix']
    timevar_pref = options['timevar_prefix']
    result_pref = options['result_prefix']
    freq = options['freq']
    freq = [float(f) for f in freq.split(',')]

    xnames = xnames.split(',')

    N = len(xnames)
    if len(freq) >= (N-1)/2:
        grass.error("Count of used harmonics is too large. Reduce the parameter.")
        sys.exit(1)

    const_name, time_names, harm_names = generate_vars(N, freq, timevar_pref)

    settings_name = uuid.uuid4().hex
    settings = open(settings_name, 'w')
    _generate_sample_descr(settings, freq, xnames, const_name, time_names, harm_names)
    settings.close()
    regression(settings_name, coef_pref)
    inverse_transform(settings_name, coef_pref, result_pref)
    os.unlink(settings_name)
Example No. 29
def main():

    options, flags = gscript.parser()
    input = options['input']
    output = options['output']

    if (input is None or input == ""):
        gscript.error(_("[h.out] ERROR: input is a mandatory parameter."))
        exit()

    exists = False
    maps_list = utils.findmaps(type='raster')
    for map in maps_list:
        if input == map[0]:
            exists = True
            break
    if (not exists):
        gscript.error(_("[h.out] ERROR: could not find input map."))
        exit()

    if (output is None or output == ""):
        gscript.error(_("[h.out] ERROR: output is a mandatory parameter."))
        exit()

    rast = raster.RasterRow(input)
    # Set region (note that it is tricking GRASS to think it is a squared raster)
    info = gscript.read_command('r.info', map=input, flags='g')

    info = info.split("\n")

    hexASCII = HASC()
    # This can probably be set from the RasterRow object
    hexASCII.init(
        int(info[7].split("=")[1]),  #ncols, 
        int(info[6].split("=")[1]),  #nrows, 
        int(info[3].split("=")[1]),  #xll, 
        int(info[1].split("=")[1]),  #yll, 
        "NA")  #nodata)

    r = 0
    rast.open()
    for row in rast:
        for c in range(0, rast.info.cols):
            hexASCII.set(c, r, row[c])
        gscript.message(_("[h.out] DEBUG: Exporting row: %s" % row))
        r = r + 1
    rast.close()

    gscript.message(_("[h.out] SUCCESS: HexASCII raster exported."))
Example No. 30
def main():

    # parameters - file name and extension
    outputfile = options['file']
    ext = outputfile.split('.')
    if len(ext) == 1:
        grass.fatal("Please provide the file extension of the output file")
    filetype = options['filetype']
    if filetype == 'cairo':
        allowed = ('.png', '.bmp', '.ppm', '.pdf', '.ps', '.svg')
        if not outputfile.lower().endswith(allowed):
            grass.fatal("Unknown display driver <{}>".format(ext[1]))
    if filetype == "ps" and not ext[1] == "ps":
        grass.fatal("The file type <{}> does not match the file extension <"
                    "{}>".format(filetype, ext[1]))
    if filetype == "png" and not ext[1] == "png":
        grass.fatal("The file type <{}> does not match the file extension <"
                    "{}>".format(filetype, ext[1]))

    # parameters - image settings
    unit = options['unit']
    resol = options['resolution']
    if resol == '':
        if unit == 'px':
            resol = 96
        else:
            resol = 300
    else:
        resol = int(resol)
    dimensions = options['dimensions']
    width, height = dimensions.split(",")
    bgcolor = options['color']
    inmap = options['raster']
    labelnum = options['labelnum']
    vr = options['range']
    font = options['font']
    fontsize = int(options['fontsize'])
    digits = int(options['digits'])
    labval = options['label_values']
    labstep = options['label_step']

    # flag parameters
    flag_f = flags['f']
    flag_d = flags['d']
    flag_t = flags['t']
    if flag_t:
        tagmargin = 9
    else:
        tagmargin = 4

    # Compute output size of legend bar in pixels
    if unit == 'cm':
        bw = math.ceil(float(width) / 2.54 * float(resol))
        bh = math.ceil(float(height) / 2.54 * float(resol))
    elif unit == 'mm':
        bw = math.ceil(float(width) / 25.4 * float(resol))
        bh = math.ceil(float(height) / 25.4 * float(resol))
    elif unit == 'inch':
        bw = math.ceil(float(width) * float(resol))
        bh = math.ceil(float(height) * float(resol))
    elif unit == "px":
        bw = float(width)
        bh = float(height)
    else:
        # fatal: bw/bh would be undefined below if the unit is unknown
        grass.fatal('Unit must be inch, cm, mm or px')

    # Add size of legend to w or h, if flag_d is set
    # Add size of tics
    if flag_d:
        histmargin = 2.75
    else:
        histmargin = 1
    if float(height) > float(width):
        w = bw * histmargin + tagmargin
        h = bh + 4
    else:
        h = bh * histmargin + tagmargin
        w = bw + 4

    # Determine image width and height
    if fontsize == 0:
        fz = 1
    else:
        fz = round(float(fontsize) * (float(resol) / 72.272))

    # Determine space at left and right (or top and bottom)
    # based on fontsize (fz) and number of digits
    maprange = grass.raster_info(inmap)
    maxval = round(maprange['max'], digits)
    minval = round(maprange['min'], digits)
    if maxval < 1:
        maxl = len(str(maxval)) - 1
    else:
        maxl = len(str(maxval)) - 2
    if minval < 1:
        minl = len(str(minval)) - 1
    else:
        minl = len(str(minval)) - 2
    margin_left = 0.5 * minl * fz
    margin_right = 0.5 * maxl * fz

    # Page width and height (iw, ih)
    # Position bar in percentage (*margin)
    # Here we take into account the extra space for the numbers and ticks

    if float(height) > float(width):
        iw = w + fz * maxl
        ih = h + margin_left + margin_right
        bmargin = str(margin_left / ih * 100)
        tmargin = str(100 - (margin_right / ih * 100))
        rmargin = str(100 * (w - tagmargin) / iw - 1)
        if flag_d:
            lmargin = str((2 + (bw * 1.75)) / iw * 100)
        else:
            lmargin = str(2 / iw * 100)
    else:
        iw = w + margin_left + margin_right
        ih = h + fz * 1.5
        bmargin = str((2 + tagmargin + fz * 1.5) / ih * 100)
        if flag_d:
            tmargin = str(100 - (2 + (bh * 1.75)) / ih * 100)
        else:
            tmargin = str(100 - 2 / ih * 100)
        lmargin = str(margin_left / iw * 100)
        rmargin = str(100 - margin_right / iw * 100)
    at = (bmargin, tmargin, lmargin, rmargin)

    # Open file connection, set font
    os.environ['GRASS_RENDER_IMMEDIATE'] = filetype
    os.environ['GRASS_RENDER_FILE'] = outputfile
    os.environ['GRASS_RENDER_HEIGHT'] = str(ih)
    os.environ['GRASS_RENDER_WIDTH'] = str(iw)
    if bgcolor == 'none':
        os.environ['GRASS_RENDER_TRANSPARENT'] = "TRUE"
    else:
        os.environ['GRASS_RENDER_BACKGROUNDCOLOR'] = bgcolor
    if flag_f and fontsize == 0:
        flag = 'cfsv'
    elif flag_f:
        flag = 'fsv'
    elif fontsize == 0:
        flag = 'csv'
    else:
        flag = 'sv'
    if flag_d:
        flag = flag + 'd'
    if flag_t:
        flag = flag + 't'

    # Write legend with various options
    d_legend = Module("d.legend",
                      flags=flag,
                      raster=inmap,
                      font=font,
                      at=at,
                      fontsize=fz,
                      labelnum=labelnum,
                      run_=False)
    if vr:
        val_range = list(map(float, vr.split(',')))
        d_legend.inputs.range = val_range
    if labval:
        label_values = list(map(float, labval.split(',')))
        d_legend.inputs.label_values = label_values
    if labstep:
        label_step = float(labstep)
        d_legend.inputs.label_step = label_step
    d_legend.run()

    # Set image resolution
    if found and outputfile.lower().endswith(('.png', '.bmp')):
        im = Image.open(outputfile)
        im.save(outputfile, dpi=(resol, resol))

    # Provide information about the image on standard output
    grass.message("----------------------------\n")
    grass.message("File saved as {}".format(outputfile))
    grass.message("The image dimensions are:\n")
    grass.message("{} px wide and {} px high\n".format(
        str(int(iw)), str(int(ih))))
    if unit == 'inch':
        wr = round(iw / resol, 3)
        hr = round(ih / resol, 3)
    elif unit == 'cm':
        wr = round(iw / resol * 2.54, 3)
        hr = round(ih / resol * 2.54, 3)
    elif unit == 'mm':
        wr = round(iw / resol * 2.54 * 10, 3)
        hr = round(ih / resol * 2.54 * 10, 3)
    else:
        wr = "same"
    if wr != "same":
        grass.message("at a resolution of {} ppi this is:".format(str(resol)))
        grass.message("{0} {2} x {1} {2}\n".format(str(wr), str(hr), unit))
    grass.message("----------------------------\n")
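
The found flag tested before re-saving the image is not defined anywhere in this snippet; presumably the module sets it with a PIL import guard near the top, along the lines of (an assumption):

# Assumed import guard; defines `found` used in main() above.
try:
    from PIL import Image
    found = True
except ImportError:
    found = False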
Example No. 31
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbor module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            mm = pymod.MultiModule([reg, mod],
                                   sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(
                _("Error running module: %s\n    stderr: %s") %
                (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
Example No. 32
def extendLine(map, map_out, maxlen=200, scale=0.5, debug=False, verbose=1):
    #
    # map=Input map name
    # map_out=Output map with extensions
    # maxlen=Max length in map units that line can be extended (def=200)
    # scale=Maximum length of extension as proportion of original line, disabled if 0 (def=0.5)
    # vlen=number of vertices to look back in calculating line end direction (def=1)
    # Not sure if it is worth putting this in as parameter.
    #
    allowOverwrite = os.getenv('GRASS_OVERWRITE', '0') == '1'
    grass.info("map={}, map_out={}, maxlen={}, scale={}, debug={}".format(
        map, map_out, maxlen, scale, debug))
    vlen = 1  # not sure if this is worth putting in as parameter
    cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'parent', 'INTEGER'),
            (u'dend', 'TEXT'), (u'orgx', 'DOUBLE PRECISION'),
            (u'orgy', 'DOUBLE PRECISION'), (u'search_len', 'DOUBLE PRECISION'),
            (u'search_az', 'DOUBLE PRECISION'), (u'best_xid', 'INTEGER'),
            (u'near_x', 'DOUBLE PRECISION'), (u'near_y', 'DOUBLE PRECISION'),
            (u'other_cat', 'INTEGER'), (u'xtype', 'TEXT'),
            (u'x_len', 'DOUBLE PRECISION')]
    extend = VectorTopo('extend')
    if extend.exist():
        extend.remove()
    extend.open('w', tab_name='extend', tab_cols=cols)
    #
    # Go through input map, looking at each line and its two nodes to find nodes
    # with only a single line starting/ending there - i.e. a dangle.
    # For each found, generate an extension line in the new map "extend"
    #
    inMap = VectorTopo(map)
    inMap.open('r')
    dangleCnt = 0
    tickLen = len(inMap)
    grass.info("Searching {} features for dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")
    for ln in inMap:
        ticker = (ticker + 1)
        grass.percent(ticker, tickLen, 5)
        if ln.gtype == 2:  # Only process lines
            for nd in ln.nodes():
                if nd.nlines == 1:  # We have a dangle
                    dangleCnt = dangleCnt + 1
                    vtx = min(len(ln) - 1, vlen)
                    if len([1 for _ in nd.lines(only_out=True)
                            ]) == 1:  # Dangle starting at node
                        dend = "head"
                        sx = ln[0].x
                        sy = ln[0].y
                        dx = sx - ln[vtx].x
                        dy = sy - ln[vtx].y
                    else:  # Dangle ending at node
                        dend = "tail"
                        sx = ln[-1].x
                        sy = ln[-1].y
                        dx = sx - ln[-(vtx + 1)].x
                        dy = sy - ln[-(vtx + 1)].y
                    endaz = math.atan2(dy, dx)
                    if scale > 0:
                        extLen = min(ln.length() * scale, maxlen)
                    else:
                        extLen = maxlen
                    ex = extLen * math.cos(endaz) + sx
                    ey = extLen * math.sin(endaz) + sy
                    extLine = geo.Line([(sx, sy), (ex, ey)])
                    quiet = extend.write(extLine,
                                         (ln.cat, dend, sx, sy, extLen, endaz,
                                          0, 0, 0, 0, 'null', extLen))

    grass.info(
        "{} dangle nodes found, committing table extend".format(dangleCnt))
    extend.table.conn.commit()
    extend.close(build=True, release=True)
    inMap.close()

    #
    # Create two tables where extensions intersect;
    # 1. intersect with original lines
    # 2. intersect with self - to extract intersects between extensions
    #
    # First the intersects with original lines
    grass.info(
        "Searching for intersects between potential extensions and original lines"
    )
    table_isectIn = Table('isectIn',
                          connection=sqlite3.connect(get_path(path)))
    if table_isectIn.exist():
        table_isectIn.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to=map,
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectIn")
    # Will have touched the dangle it comes from, so remove those touches
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn INNER JOIN extend ON from_cat=cat WHERE near_cat=parent)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectIn ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectIn SET ntype = 'orig' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Now second self intersect table
    #
    grass.info("Searching for intersects of potential extensions")
    table_isectX = Table('isectX', connection=sqlite3.connect(get_path(path)))
    if table_isectX.exist():
        table_isectX.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to="extend",
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectX")
    # Obviously all extensions will intersect with themself, so remove those "intersects"
    run_command("db.execute",
                sql="DELETE FROM isectX WHERE from_cat = near_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectX ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectX SET ntype = 'ext' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Combine the two tables and add a few more attributes
    #
    run_command("db.execute",
                sql="INSERT INTO isectIn SELECT * FROM isectX",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    cols_isectIn = Columns('isectIn',
                           connection=sqlite3.connect(get_path(path)))
    cols_isectIn.add(['from_x'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['from_y'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['ext_len'], ['DOUBLE PRECISION'])
    # Get starting coordinate at the end of the dangle
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_x = (SELECT extend.orgx FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_y = (SELECT extend.orgy FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    # For each intersect point, calculate the distance along extension line from end of dangle
    # Would be nicer to do this in the database but SQLite doesn't support sqrt or exponents
    grass.info(
        "Calculating distances of intersects along potential extensions")
    cur = table_isectIn.execute(
        sql_code="SELECT rowid, from_x, from_y, nx, ny FROM isectIn")
    for row in cur.fetchall():
        rowid, fx, fy, nx, ny = row
        x_len = math.sqrt((fx - nx)**2 + (fy - ny)**2)
        sqlStr = "UPDATE isectIn SET ext_len={:.8f} WHERE rowid={:d}".format(
            x_len, rowid)
        table_isectIn.execute(sql_code=sqlStr)
    grass.verbose("Ready to commit isectIn changes")
    table_isectIn.conn.commit()
    # Remove any zero distance from end of their dangle.
    # This happens when another extension intersects exactly at that point
    run_command("db.execute",
                sql="DELETE FROM isectIn WHERE ext_len = 0.0",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()

    # Go through the extensions and find the intersect closest to each origin.
    grass.info("Searching for closest intersect for each potential extension")

    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN bst INTEGER"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nrx DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nry DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN ocat TEXT"
    #    run_command("db.execute",
    #                sql = "INSERT OR REPLACE INTO extend_t1 (bst, nrx, nry, ocat) VALUES ((SELECT isectIn.rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=extend_t1.cat ORDER BY ext_len ASC LIMIT 1))",
    #               driver = "sqlite",
    #               database = "$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    grass.verbose("CREATE index")
    run_command("db.execute",
                sql="CREATE INDEX idx_from_cat ON isectIn (from_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE best_xid")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET best_xid = (SELECT isectIn.rowid FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE x_len")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET x_len = (SELECT ext_len FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_x")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_x = (SELECT nx FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_y")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_y = (SELECT ny FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE other_cat")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET other_cat = (SELECT near_cat FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE xtype")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET xtype = (SELECT ntype FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("DROP index")
    run_command("db.execute",
                sql="DROP INDEX idx_from_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("CREATE index on near_cat")
    run_command("db.execute",
                sql="CREATE INDEX idx_near_cat ON isectIn (near_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    quiet = table_isectIn.filters.select('rowid', 'ext_len', 'nx', 'ny',
                                         'near_cat', 'ntype')
    #    quiet=table_isectIn.filters.order_by(['ext_len ASC'])
    quiet = table_isectIn.filters.order_by('ext_len ASC')
    quiet = table_isectIn.filters.limit(1)
    table_extend = Table('extend', connection=sqlite3.connect(get_path(path)))

    # Code below was replaced by the commands above until the memory problem can be sorted
    #    table_extend.filters.select('cat')
    #    cur=table_extend.execute()
    #    updateCnt = 0
    #    for row in cur.fetchall():
    #        cat, = row
    #        quiet=table_isectIn.filters.where('from_cat={:d}'.format(cat))

    ##SELECT rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=32734 ORDER BY ext_len ASC LIMIT 1

    #        x_sect=table_isectIn.execute().fetchone()
    #        if x_sect is not None:
    #            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
    #            sqlStr="UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
    #            table_extend.execute(sql_code=sqlStr)
## Try periodic commit to avoid a crash!
    #            updateCnt = (updateCnt + 1) % 10000
    #            if updateCnt == 0:
    #              table_extend.conn.commit()
    grass.verbose("Ready to commit extend changes")
    table_extend.conn.commit()
    #
    # There may be extensions that crossed, with the intersection chosen by one
    # but not reciprocated by the other.
    # Remove those possibilities and allow the jilted extension to re-search.
    #
    grass.verbose("Deleting intersects already resolved")
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn JOIN extend ON near_cat=cat WHERE ntype='ext' AND xtype!='null')",  #"AND from_cat!=other_cat" no second chance!
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    grass.verbose("Deleting complete")

    # To find the jilted - need a copy of extensions that have found an
    # intersection (won't overwrite so drop first)
    grass.verbose(
        "Re-searching for mis-matched intersects between potential extensions")
    table_imatch = Table('imatch', connection=sqlite3.connect(get_path(path)))
    if table_imatch.exist():
        table_imatch.drop(force=True)
    wvar = "xtype!='null'"
    run_command(
        "db.copy",
        overwrite=True,
        quiet=True,
        from_driver="sqlite",
        from_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        from_table="extend",
        to_driver="sqlite",
        to_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        to_table="imatch",
        where=wvar)
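    # imatch now holds a snapshot of every extension that found an intersection;
    # self-joining extend against this copy (below) exposes pairs where A chose
    # B but B did not choose A back.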
    # Memory problems?
    if gc.isenabled():
        grass.verbose("Garbage collection enabled - forcing gc cycle")
        gc.collect()
    else:
        grass.verbose("Garbage collection not enabled")
    # Ensure tables are committed
    table_extend.conn.commit()
    table_imatch.conn.commit()
    table_isectIn.conn.commit()
    # Identify the jilted
    sqlStr = "SELECT extend.cat FROM extend JOIN imatch ON extend.other_cat=imatch.cat WHERE extend.xtype='ext' and extend.cat!=imatch.other_cat"
    cur = table_extend.execute(sql_code=sqlStr)
    updateCnt = 0
    for row in cur.fetchall():
        cat, = row
        grass.verbose("Reworking extend.cat={}".format(cat))
        quiet = table_isectIn.filters.where('from_cat={:d}'.format(cat))
        #print("SQL: {}".format(table_isectIn.filters.get_sql()))
        x_sect = table_isectIn.execute().fetchone()  ## Problem here under modules
        if x_sect is None:
            sqlStr = "UPDATE extend SET best_xid=0, x_len=search_len, near_x=0, near_y=0, other_cat=0, xtype='null' WHERE cat={:d}".format(
                cat)
        else:
            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
            sqlStr = "UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(
                x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
        table_extend.execute(sql_code=sqlStr)
        ## Try periodic commit to avoid a crash!
        updateCnt = (updateCnt + 1) % 100
        if (updateCnt == 0):  # or (cat == 750483):
            grass.verbose("Periodic commit of table extend")
            table_extend.conn.commit()

    grass.verbose("Committing adjustments to table extend")
    table_extend.conn.commit()
    #
    # For debugging, create a map with the chosen intersect points
    #
    if debug:
        wvar = "xtype!='null' AND x_len!=0"
        #        print(wvar)
        run_command(
            "v.in.db",
            overwrite=True,
            quiet=True,
            table="extend",
            driver="sqlite",
            database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
            x="near_x",
            y="near_y",
            key="cat",
            where=wvar,
            output="chosen")
#
# Finally adjust the dangle lines in input map - use a copy (map_out) if requested
#
    if map_out:
        run_command("g.copy",
                    overwrite=allowOverwrite,
                    quiet=True,
                    vector=map + "," + map_out)
    else:  # Otherwise just modify the original dataset (map)
        if allowOverwrite:
            grass.warning("Modifying vector map ({})".format(map))
            map_out = map
        else:
            grass.error(
                "Use the --o flag to allow modifying the input vector map ({})".format(
                    map))
            return 1
#
# Get info for lines that need extending
    table_extend.filters.select(
        'parent, dend, near_x, near_y, search_az, xtype')
    table_extend.filters.where("xtype!='null'")
    extLines = table_extend.execute().fetchall()
    cat_mods = [ext[0] for ext in extLines]
    tickLen = len(cat_mods)
    grass.info("Extending {} dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")

    # Open up the map_out copy (or the original) and work through looking for lines that need modifying
    inMap = VectorTopo(map_out)
    inMap.open('rw', tab_name=map_out)

    for ln_idx in range(len(inMap)):
        ln = inMap.read(ln_idx + 1)
        if ln.gtype == 2:  # Only process lines
            while ln.cat in cat_mods:  # Note: could be 'head' and 'tail'
                ticker = (ticker + 1)
                grass.percent(ticker, tickLen, 5)
                cat_idx = cat_mods.index(ln.cat)
                cat, dend, nx, ny, endaz, xtype = extLines.pop(cat_idx)
                dump = cat_mods.pop(cat_idx)
                if xtype == 'orig':  # Overshoot by 0.1, as breaking lines exactly is unreliable
                    nx = nx + 0.1 * math.cos(endaz)
                    ny = ny + 0.1 * math.sin(endaz)
                newEnd = geo.Point(x=nx, y=ny, z=None)
                if dend == 'head':
                    ln.insert(0, newEnd)
                else:  # 'tail'
                    ln.append(newEnd)
                quiet = inMap.rewrite(ln_idx + 1, ln)
        else:
            quiet = inMap.delete(ln_idx + 1)


        ## Try periodic commit and garbage collection to avoid a crash!
        if (ln_idx % 1000) == 0:
            #           inMap.table.conn.commit()  - no such thing - Why??
            if gc.isenabled():
                quiet = gc.collect()

    inMap.close(build=True, release=True)
    grass.message("v.extendlines completing")
    #
    # Clean up temporary tables and maps
    #
    if not debug:
        table_isectIn.drop(force=True)
        table_isectX.drop(force=True)
        table_imatch.drop(force=True)
        extend.remove()
        chosen = VectorTopo('chosen')
        if chosen.exist():
            chosen.remove()
    return 0
Exemplo n.º 33
0
def main():
    from dateutil.parser import parse

    try:
        from pygbif import occurrences
        from pygbif import species
    except ImportError:
        grass.fatal(
            _("Cannot import pygbif (https://github.com/sckott/pygbif)"
              " library."
              " Please install it (pip install pygbif)"
              " or ensure that it is on path"
              " (use PYTHONPATH variable)."))

    # Parse input options
    output = options["output"]
    mask = options["mask"]
    species_maps = flags["i"]
    no_region_limit = flags["r"]
    no_topo = flags["b"]
    print_species = flags["p"]
    print_species_table = flags["t"]
    print_species_shell = flags["g"]
    print_occ_number = flags["o"]
    allow_no_geom = flags["n"]
    hasGeoIssue = flags["s"]
    taxa_list = options["taxa"].split(",")
    institutionCode = options["institutioncode"]
    basisofrecord = options["basisofrecord"]
    recordedby = options["recordedby"].split(",")
    date_from = options["date_from"]
    date_to = options["date_to"]
    country = options["country"]
    continent = options["continent"]
    rank = options["rank"]

    # Define static variable
    # Initialize cat
    cat = 0
    # Number of occurrences to fetch in one request
    chunk_size = 300
    # lat/lon proj string
    latlon_crs = [
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0.000,0.000,0.000",
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0,0,0,0,0,0,0",
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0.000,0.000,0.000 +type=crs",
    ]
    # List attributes available in Darwin Core
    # not all attributes are returned in each request
    # to avoid key errors when accessing the dictionary returned by pygbif
    # presence of DWC keys in the returned dictionary is checked using this list
    # The number of keys in this list has to be equal to the number of columns
    # in the attribute table and the attributes written for each occurrence
    dwc_keys = [
        "key",
        "taxonRank",
        "taxonKey",
        "taxonID",
        "scientificName",
        "species",
        "speciesKey",
        "genericName",
        "genus",
        "genusKey",
        "family",
        "familyKey",
        "order",
        "orderKey",
        "class",
        "classKey",
        "phylum",
        "phylumKey",
        "kingdom",
        "kingdomKey",
        "eventDate",
        "verbatimEventDate",
        "startDayOfYear",
        "endDayOfYear",
        "year",
        "month",
        "day",
        "occurrenceID",
        "occurrenceStatus",
        "occurrenceRemarks",
        "Habitat",
        "basisOfRecord",
        "preparations",
        "sex",
        "type",
        "locality",
        "verbatimLocality",
        "decimalLongitude",
        "decimalLatitude",
        "coordinateUncertaintyInMeters",
        "geodeticDatum",
        "higerGeography",
        "continent",
        "country",
        "countryCode",
        "stateProvince",
        "gbifID",
        "protocol",
        "identifier",
        "recordedBy",
        "identificationID",
        "identifiers",
        "dateIdentified",
        "modified",
        "institutionCode",
        "lastInterpreted",
        "lastParsed",
        "references",
        "relations",
        "catalogNumber",
        "occurrenceDetails",
        "datasetKey",
        "datasetName",
        "collectionCode",
        "rights",
        "rightsHolder",
        "license",
        "publishingOrgKey",
        "publishingCountry",
        "lastCrawled",
        "specificEpithet",
        "facts",
        "issues",
        "extensions",
        "language",
    ]
    # Define columns for attribute table
    cols = [
        ("cat", "INTEGER PRIMARY KEY"),
        ("g_search", "varchar(100)"),
        ("g_key", "integer"),
        ("g_taxonrank", "varchar(50)"),
        ("g_taxonkey", "integer"),
        ("g_taxonid", "varchar(50)"),
        ("g_scientificname", "varchar(255)"),
        ("g_species", "varchar(255)"),
        ("g_specieskey", "integer"),
        ("g_genericname", "varchar(255)"),
        ("g_genus", "varchar(50)"),
        ("g_genuskey", "integer"),
        ("g_family", "varchar(50)"),
        ("g_familykey", "integer"),
        ("g_order", "varchar(50)"),
        ("g_orderkey", "integer"),
        ("g_class", "varchar(50)"),
        ("g_classkey", "integer"),
        ("g_phylum", "varchar(50)"),
        ("g_phylumkey", "integer"),
        ("g_kingdom", "varchar(50)"),
        ("g_kingdomkey", "integer"),
        ("g_eventdate", "text"),
        ("g_verbatimeventdate", "varchar(50)"),
        ("g_startDayOfYear", "integer"),
        ("g_endDayOfYear", "integer"),
        ("g_year", "integer"),
        ("g_month", "integer"),
        ("g_day", "integer"),
        ("g_occurrenceid", "varchar(255)"),
        ("g_occurrenceStatus", "varchar(50)"),
        ("g_occurrenceRemarks", "varchar(50)"),
        ("g_Habitat", "varchar(50)"),
        ("g_basisofrecord", "varchar(50)"),
        ("g_preparations", "varchar(50)"),
        ("g_sex", "varchar(50)"),
        ("g_type", "varchar(50)"),
        ("g_locality", "varchar(255)"),
        ("g_verbatimlocality", "varchar(255)"),
        ("g_decimallongitude", "double precision"),
        ("g_decimallatitude", "double precision"),
        ("g_coordinateUncertaintyInMeters", "double precision"),
        ("g_geodeticdatum", "varchar(50)"),
        ("g_higerGeography", "varchar(255)"),
        ("g_continent", "varchar(50)"),
        ("g_country", "varchar(50)"),
        ("g_countryCode", "varchar(50)"),
        ("g_stateProvince", "varchar(50)"),
        ("g_gbifid", "varchar(255)"),
        ("g_protocol", "varchar(255)"),
        ("g_identifier", "varchar(50)"),
        ("g_recordedby", "varchar(255)"),
        ("g_identificationid", "varchar(255)"),
        ("g_identifiers", "text"),
        ("g_dateidentified", "text"),
        ("g_modified", "text"),
        ("g_institutioncode", "varchar(50)"),
        ("g_lastinterpreted", "text"),
        ("g_lastparsed", "text"),
        ("g_references", "varchar(255)"),
        ("g_relations", "text"),
        ("g_catalognumber", "varchar(50)"),
        ("g_occurrencedetails", "text"),
        ("g_datasetkey", "varchar(50)"),
        ("g_datasetname", "varchar(255)"),
        ("g_collectioncode", "varchar(50)"),
        ("g_rights", "varchar(255)"),
        ("g_rightsholder", "varchar(255)"),
        ("g_license", "varchar(50)"),
        ("g_publishingorgkey", "varchar(50)"),
        ("g_publishingcountry", "varchar(50)"),
        ("g_lastcrawled", "text"),
        ("g_specificepithet", "varchar(50)"),
        ("g_facts", "text"),
        ("g_issues", "text"),
        ("g_extensions", "text"),
        ("g_language", "varchar(50)"),
    ]

    # maybe no longer required in Python3
    set_output_encoding()
    # Set temporal filter if requested by user
    # Initialize eventDate filter
    eventDate = None
    # Check if date from is compatible (ISO compliant)
    if date_from:
        try:
            parse(date_from)
        except:
            grass.fatal("Invalid invalid start date provided")

        if date_from and not date_to:
            eventDate = "{}".format(date_from)
    # Check if date to is compatible (ISO compliant)
    if date_to:
        try:
            parse(date_to)
        except:
            grass.fatal("Invalid invalid end date provided")
        # Check if date to is after date_from
        if parse(date_from) < parse(date_to):
            eventDate = "{},{}".format(date_from, date_to)
        else:
            grass.fatal(
                "Invalid date range: End date has to be after start date!")
    # Set filter on basisOfRecord if requested by user
    if basisofrecord == "ALL":
        basisOfRecord = None
    else:
        basisOfRecord = basisofrecord
    # Allow also occurrences with spatial issues if requested by user
    hasGeospatialIssue = False
    if hasGeoIssue:
        hasGeospatialIssue = True
    # Allow also occurrences without coordinates if requested by user
    hasCoordinate = True
    if allow_no_geom:
        hasCoordinate = False

    # Set reprojection parameters
    # Set target projection of current LOCATION
    proj_info = grass.parse_command("g.proj", flags="g")
    target_crs = grass.read_command("g.proj", flags="fj").rstrip()
    target = osr.SpatialReference()

    # Prefer EPSG CRS definitions
    if proj_info["epsg"]:
        target.ImportFromEPSG(int(proj_info["epsg"]))
    else:
        target.ImportFromProj4(target_crs)

    # GDAL >= 3 swaps x and y axis, see: github.com/OSGeo/gdal/issues/1546
    if int(gdal_version[0]) >= 3:
        target.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    if target_crs == "XY location (unprojected)":
        grass.fatal("Sorry, XY locations are not supported!")

    # Set source projection from GBIF
    source = osr.SpatialReference()
    source.ImportFromEPSG(4326)
    # GDAL >= 3 swaps x and y axis, see: github.com/OSGeo/gdal/issues/1546
    if int(gdal_version[0]) >= 3:
        source.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    if target_crs not in latlon_crs:
        transform = osr.CoordinateTransformation(source, target)
        reverse_transform = osr.CoordinateTransformation(target, source)

    # Generate WKT polygon to use for spatial filtering if requested
    if mask:
        if len(mask.split("@")) == 2:
            m = VectorTopo(mask.split("@")[0], mapset=mask.split("@")[1])
        else:
            m = VectorTopo(mask)
        if not m.exist():
            grass.fatal("Could not find vector map <{}>".format(mask))
        m.open("r")
        if not m.is_open():
            grass.fatal("Could not open vector map <{}>".format(mask))

        # Use the map bounding box as spatial filter unless the map contains exactly 1 area
        if m.number_of("areas") == 1:
            region_pol = [area.to_wkt() for area in m.viter("areas")][0]
        else:
            bbox = (str(m.bbox()).replace("Bbox(", "").replace(
                " ", "").rstrip(")").split(","))
            region_pol = "POLYGON (({0} {1}, {0} {3}, {2} {3}, {2} {1}, {0} {1}))".format(
                bbox[2], bbox[0], bbox[3], bbox[1])
        m.close()
    else:
        # Do not limit import spatially if LOCATION is able to take global data
        if no_region_limit:
            if target_crs not in latlon_crs:
                grass.fatal("Import of data from outside the current region is"
                            "only supported in a WGS84 location!")
            region_pol = None
        else:
            # Limit import spatially to current region
            # if LOCATION is !NOT! able to take global data,
            # to avoid projection errors
            region = grass.parse_command("g.region", flags="g")
            region_pol = "POLYGON (({0} {1},{0} {3},{2} {3},{2} {1},{0} {1}))".format(
                region["e"], region["n"], region["w"], region["s"])

    # Do not reproject in latlon LOCATIONS
    if target_crs not in latlon_crs:
        pol = ogr.CreateGeometryFromWkt(region_pol)
        pol.Transform(reverse_transform)
        pol = pol.ExportToWkt()
    else:
        pol = region_pol

    # Create a single output map unless separate maps for each species are requested
    if (not species_maps and not print_species and not print_species_shell
            and not print_occ_number and not print_species_table):
        mapname = output
        new = Vector(mapname)
        new.open("w", tab_name=mapname, tab_cols=cols)
        cat = 1

    # Import data for each species
    for s in taxa_list:
        # Get the taxon key unless a taxon key was provided as input
        try:
            key = int(s)
        except:
            try:
                species_match = species.name_backbone(s,
                                                      rank=rank,
                                                      strict=False,
                                                      verbose=True)
                key = species_match["usageKey"]
            except:
                grass.error(
                    "Data request for taxon {} failed. Are you online?".format(
                        s))
                continue

        # Return matching taxon and alternatives and exit
        if print_species:
            print("Matching taxon for {} is:".format(s))
            print("{} {}".format(species_match["scientificName"],
                                 species_match["status"]))
            if "alternatives" in list(species_match.keys()):
                print("Alternative matches might be: {}".format(s))
                for m in species_match["alternatives"]:
                    print("{} {}".format(m["scientificName"], m["status"]))
            else:
                print("No alternatives found for the given taxon")
            continue
        if print_species_shell:
            print("match={}".format(species_match["scientificName"]))
            if "alternatives" in list(species_match.keys()):
                alternatives = []
                for m in species_match["alternatives"]:
                    alternatives.append(m["scientificName"])
                print("alternatives={}".format(",".join(alternatives)))
            continue
        if print_species_table:
            if "alternatives" in list(species_match.keys()):
                if len(species_match["alternatives"]) == 0:
                    print("{0}|{1}|{2}|".format(
                        s, key, species_match["scientificName"]))
                else:
                    alternatives = []
                    for m in species_match["alternatives"]:
                        alternatives.append(m["scientificName"])
                    print("{0}|{1}|{2}|{3}".format(
                        s,
                        key,
                        species_match["scientificName"],
                        ",".join(alternatives),
                    ))
            continue
        try:
            returns_n = occurrences.search(
                taxonKey=key,
                hasGeospatialIssue=hasGeospatialIssue,
                hasCoordinate=hasCoordinate,
                institutionCode=institutionCode,
                basisOfRecord=basisOfRecord,
                recordedBy=recordedby,
                eventDate=eventDate,
                continent=continent,
                country=country,
                geometry=pol,
                limit=1,
            )["count"]
        except:
            grass.error(
                "Data request for taxon {} faild. Are you online?".format(s))
            returns_n = 0

        # Exit if search does not give a return
        # Print only number of returns for the given search and exit
        if print_occ_number:
            print("Found {0} occurrences for taxon {1}...".format(
                returns_n, s))
            continue
        elif returns_n <= 0:
            grass.warning(
                "No occurrences for current search for taxon {0}...".format(s))
            continue
        elif returns_n >= 200000:
            grass.warning(
                "Your search for {1} returns {0} records.\n"
                "Unfortunately, the GBIF search API is limited to 200,000 records per request.\n"
                "The download will be incomplete. Please consider to split up your search."
                .format(returns_n, s))

        # Get the number of chunks to download
        chunks = int(math.ceil(returns_n / float(chunk_size)))
        grass.verbose("Downloading {0} occurrences for taxon {1}...".format(
            returns_n, s))

        # Create a map for each species if requested using map name as suffix
        if species_maps:
            mapname = "{}_{}".format(s.replace(" ", "_"), output)

            new = Vector(mapname)
            new.open("w", tab_name=mapname, tab_cols=cols)
            cat = 0

        # Download the data from GBIF
        for c in range(chunks):
            # Define offset
            offset = c * chunk_size
            # Adjust chunk_size to the hard limit of 200,000 records in GBIF API
            # if necessary
            if offset + chunk_size >= 200000:
                chunk_size = 200000 - offset
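            # e.g. at offset=199800 with chunk_size=300 the request is trimmed
            # to limit=200 so offset+limit stays within the 200,000 record cap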
            # Get the returns for the next chunk
            returns = occurrences.search(
                taxonKey=key,
                hasGeospatialIssue=hasGeospatialIssue,
                hasCoordinate=hasCoordinate,
                institutionCode=institutionCode,
                basisOfRecord=basisOfRecord,
                recordedBy=recordedby,
                eventDate=eventDate,
                continent=continent,
                country=country,
                geometry=pol,
                limit=chunk_size,
                offset=offset,
            )

            # Write the returned data to map and attribute table
            for res in returns["results"]:
                if target_crs not in latlon_crs:
                    point = ogr.CreateGeometryFromWkt("POINT ({} {})".format(
                        res["decimalLongitude"], res["decimalLatitude"]))
                    point.Transform(transform)
                    x = point.GetX()
                    y = point.GetY()
                else:
                    x = res["decimalLongitude"]
                    y = res["decimalLatitude"]

                point = Point(x, y)

                for k in dwc_keys:
                    if k not in list(res.keys()):
                        res.update({k: None})

                cat = cat + 1
                new.write(
                    point,
                    cat=cat,
                    attrs=(
                        "{}".format(s),
                        res["key"],
                        res["taxonRank"],
                        res["taxonKey"],
                        res["taxonID"],
                        res["scientificName"],
                        res["species"],
                        res["speciesKey"],
                        res["genericName"],
                        res["genus"],
                        res["genusKey"],
                        res["family"],
                        res["familyKey"],
                        res["order"],
                        res["orderKey"],
                        res["class"],
                        res["classKey"],
                        res["phylum"],
                        res["phylumKey"],
                        res["kingdom"],
                        res["kingdomKey"],
                        "{}".format(res["eventDate"])
                        if res["eventDate"] else None,
                        "{}".format(res["verbatimEventDate"])
                        if res["verbatimEventDate"] else None,
                        res["startDayOfYear"],
                        res["endDayOfYear"],
                        res["year"],
                        res["month"],
                        res["day"],
                        res["occurrenceID"],
                        res["occurrenceStatus"],
                        res["occurrenceRemarks"],
                        res["Habitat"],
                        res["basisOfRecord"],
                        res["preparations"],
                        res["sex"],
                        res["type"],
                        res["locality"],
                        res["verbatimLocality"],
                        res["decimalLongitude"],
                        res["decimalLatitude"],
                        res["coordinateUncertaintyInMeters"],
                        res["geodeticDatum"],
                        res["higerGeography"],
                        res["continent"],
                        res["country"],
                        res["countryCode"],
                        res["stateProvince"],
                        res["gbifID"],
                        res["protocol"],
                        res["identifier"],
                        res["recordedBy"],
                        res["identificationID"],
                        ",".join(res["identifiers"]),
                        "{}".format(res["dateIdentified"])
                        if res["dateIdentified"] else None,
                        "{}".format(res["modified"])
                        if res["modified"] else None,
                        res["institutionCode"],
                        "{}".format(res["lastInterpreted"])
                        if res["lastInterpreted"] else None,
                        "{}".format(res["lastParsed"])
                        if res["lastParsed"] else None,
                        res["references"],
                        ",".join(res["relations"]),
                        res["catalogNumber"],
                        "{}".format(res["occurrenceDetails"])
                        if res["occurrenceDetails"] else None,
                        res["datasetKey"],
                        res["datasetName"],
                        res["collectionCode"],
                        res["rights"],
                        res["rightsHolder"],
                        res["license"],
                        res["publishingOrgKey"],
                        res["publishingCountry"],
                        "{}".format(res["lastCrawled"])
                        if res["lastCrawled"] else None,
                        res["specificEpithet"],
                        ",".join(res["facts"]),
                        ",".join(res["issues"]),
                        ",".join(res["extensions"]),
                        res["language"],
                    ),
                )

        # Close the current map if a map for each species is requested
        if species_maps:
            new.table.conn.commit()
            new.close()
            if not no_topo:
                grass.run_command("v.build", map=mapname, option="build")

            # Write history to map
            grass.vector_history(mapname)

    # Close the output map if not a map for each species is requested
    if (not species_maps and not print_species and not print_species_shell
            and not print_occ_number and not print_species_table):
        new.table.conn.commit()
        new.close()
        if not no_topo:
            grass.run_command("v.build", map=mapname, option="build")

        # Write history to map
        grass.vector_history(mapname)
Exemplo n.º 34
0
import os
import sys

import numpy as np
from numpy.linalg import LinAlgError


if "GISBASE" not in os.environ:
    sys.stderr.write("You must be in GRASS GIS to run this program.\n")
    sys.exit(1)

import grass.script as grass
from grass.pygrass import raster
from grass.pygrass.gis.region import Region

try:
    import statsmodels.api as sm
except ImportError:
    grass.error("Can't import statsmodels. Install statmodels package.")
    sys.exit(1)


CNULL = -2147483648  # null value for CELL maps
FNULL = np.nan       # null value for FCELL and DCELL maps


def get_val_or_null(map, row, col):
    """
    Return map value of the cell or FNULL (if the cell is null)
    """
    value = map.get(row, col)
    if map.mtype == "CELL" and value == CNULL:
        value = FNULL
    return value
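

# A minimal usage sketch (the raster name "elevation" is illustrative only);
# RasterSegment provides random cell access via get(row, col):
#
#   from grass.pygrass.raster import RasterSegment
#   rast = RasterSegment("elevation")
#   rast.open("r")
#   value = get_val_or_null(rast, 0, 0)  # FNULL (np.nan) if the cell is null
#   rast.close()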
Exemplo n.º 35
0
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif,
                                               overwrite=overwrite)
    # Configure the r.neighbor module
    neighbor_module = pymod.Module("r.neighbors", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, size=int(size),
                                   method=method, overwrite=overwrite,
                                   quiet=True)

    gregion_module = pymod.Module("g.region", raster="dummy", run_=False,
                                  finish_=False)
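    # run_=False defers execution so the preconfigured modules can be
    # deep-copied per map and queued; finish_=False makes the calls
    # non-blocking for parallel execution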

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(map.temporal_extent.get_start_time(),
                                                      sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffic(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(map_name, None, type="raster",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            grass.verbose(reg.get_bash())
            grass.verbose(mod.get_bash())
            mm = pymod.MultiModule([reg, mod], sync=False, set_temp_region=True)
            process_queue.put(mm)
        else:
            grass.verbose(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(_("Error running module: %\n    stderr: %s") %(proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title,
                                descr, stype, dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count%10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove", flags='f', type='raster', name=names, quiet=True)

    dbif.close()
Exemplo n.º 36
0
def main(options, flags):
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    points = options["points"]
    coordinates = options["coordinates"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    layout = options["layout"]
    null_value = options["null_value"]
    separator = gscript.separator(options["separator"])

    nprocs = int(options["nprocs"])
    write_header = flags["n"]
    use_stdin = flags["i"]
    vcat = flags["v"]

    #output_cat_label = flags["f"]
    #output_color = flags["r"]
    #output_cat = flags["i"]

    overwrite = gscript.overwrite()

    if coordinates and points:
        gscript.fatal(
            _("Options coordinates and points are mutually exclusive"))

    if not coordinates and not points and not use_stdin:
        gscript.fatal(
            _("Please provide the sampling coordinates via the coordinates "
              "option, the points option, or the 'i' flag (pipe coordinate "
              "positions to t.rast.what from stdin)"))

    if vcat and not points:
        gscript.fatal(_("Flag 'v' required option 'points'"))

    if use_stdin:
        coordinates_stdin = str(sys.__stdin__.read())
        # Check if coordinates are given with site names or IDs
        stdin_length = len(coordinates_stdin.split('\n')[0].split())
        if stdin_length <= 2:
            site_input = False
        elif stdin_length >= 3:
            site_input = True
    else:
        site_input = False

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where,
                                             order=order,
                                             dbif=dbif)
    dbif.close()
    if not maps:
        gscript.fatal(
            _("Space time raster dataset <%s> is empty") % sp.get_id())

    # Setup flags are disabled due to test issues
    flags = ""
    #if output_cat_label is True:
    #    flags += "f"
    #if output_color is True:
    #    flags += "r"
    #if output_cat is True:
    #    flags += "i"
    if vcat is True:
        flags += "v"

    # Configure the r.what module
    if points:
        r_what = pymod.Module("r.what",
                              map="dummy",
                              output="dummy",
                              run_=False,
                              separator=separator,
                              points=points,
                              overwrite=overwrite,
                              flags=flags,
                              null_value=null_value,
                              quiet=True)
    elif coordinates:
        # Create a list of values
        coord_list = coordinates.split(",")
        r_what = pymod.Module("r.what",
                              map="dummy",
                              output="dummy",
                              run_=False,
                              separator=separator,
                              coordinates=coord_list,
                              overwrite=overwrite,
                              flags=flags,
                              null_value=null_value,
                              quiet=True)
    elif use_stdin:
        r_what = pymod.Module("r.what",
                              map="dummy",
                              output="dummy",
                              run_=False,
                              separator=separator,
                              stdin_=coordinates_stdin,
                              overwrite=overwrite,
                              flags=flags,
                              null_value=null_value,
                              quiet=True)
    else:
        gscript.error(_("Please specify points or coordinates"))

    if len(maps) < nprocs:
        nprocs = len(maps)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)

    # 400 maps is the absolute maximum in r.what.
    # We need to determine the number of maps that can be processed
    # in parallel.

    # First estimate the number of maps per process. We use 400 maps
    # simultaneously as the maximum for a single process.

    num_loops = int(num_maps / (400 * nprocs))
    remaining_maps = num_maps % (400 * nprocs)

    if num_loops == 0:
        num_loops = 1
        remaining_maps = 0

    # Compute the number of maps for each process
    maps_per_loop = int((num_maps - remaining_maps) / num_loops)
    maps_per_process = int(maps_per_loop / nprocs)
    remaining_maps_per_loop = maps_per_loop % nprocs
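    # e.g. num_maps=2500, nprocs=4: num_loops = 2500 // 1600 = 1,
    # remaining_maps = 900, maps_per_loop = 1600, maps_per_process = 400,
    # remaining_maps_per_loop = 0; the 900 leftover maps are handled below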

    # We put the output files in an ordered list
    output_files = []
    output_time_list = []

    count = 0
    for loop in range(num_loops):
        file_name = gscript.tempfile() + "_%i" % (loop)
        count = process_loop(nprocs, maps, file_name, count, maps_per_process,
                             remaining_maps_per_loop, output_files,
                             output_time_list, r_what, process_queue)

    process_queue.wait()

    gscript.verbose("Number of raster map layers remaining for sampling %i" %
                    (remaining_maps))
    if remaining_maps > 0:
        # Use a single process if fewer than 100 maps remain
        if remaining_maps <= 100:
            map_names = []
            for i in range(remaining_maps):
                map = maps[count]
                map_names.append(map.get_id())
                count += 1
            mod = copy.deepcopy(r_what)
            mod(map=map_names, output=file_name)
            process_queue.put(mod)
        else:
            maps_per_process = int(remaining_maps / nprocs)
            remaining_maps_per_loop = remaining_maps % nprocs

            file_name = "out_remain"
            process_loop(nprocs, maps, file_name, count, maps_per_process,
                         remaining_maps_per_loop, output_files,
                         output_time_list, r_what, process_queue)

    # Wait for unfinished processes
    process_queue.wait()

    # Put the output files together in the correct order
    if layout == "row":
        one_point_per_row_output(separator, output_files, output_time_list,
                                 output, write_header, site_input, vcat)
    elif layout == "col":
        one_point_per_col_output(separator, output_files, output_time_list,
                                 output, write_header, site_input, vcat)
    else:
        one_point_per_timerow_output(separator, output_files, output_time_list,
                                     output, write_header, site_input, vcat)
Exemplo n.º 37
0
def main(options, flags):
    # TODO: intervals flag, s should be default behavior
    n_colors = int(options['ncolors'])
    discrete = flags['d']

    fallback = True
    try:
        import seaborn as sns
        fallback = False
    except ImportError:
        # perhaps this can be a function in the core
        gscript.error(_("{} Python package not installed.").format('seaborn'))
    if not fallback:
        cmap = sns.cubehelix_palette(n_colors=n_colors,
                                     start=float(options['start']),
                                     rot=float(options['nrotations']),
                                     gamma=float(options['gamma']),
                                     hue=float(options['hue']),
                                     light=float(options['light']),
                                     dark=float(options['dark']),
                                     reverse=flags['n'],
                                     as_cmap=False)
        # as_cmap ignores n_colors in 0.7.0
        # but we want n_colors to be exact when we are exporting
        # the color table or doing discrete one
        import matplotlib  # required by windows
        matplotlib.use('wxAGG')  # required by windows
        import matplotlib.colors as clr
        cmap = clr.LinearSegmentedColormap.from_list('from_list',
                                                     cmap,
                                                     N=n_colors)
    else:
        gscript.warning(
            _("Using Matplotlib cubehelix color table."
              " Most of the cubehelix parameters are ignored"))
        # we are very nice and provide a fallback
        import matplotlib.pyplot as plt
        name = 'cubehelix'
        # Matplotlib one goes from dark to light but Seaborn goes
        # the other way around by default
        if not flags['n']:
            name += '_r'
        cmap = plt.get_cmap(name, lut=n_colors)

    comments = []
    comments.append("Cubehelix color table generated using:")
    command = [sys.argv[0].split(os.path.sep)[-1]]
    command.extend(sys.argv[1:])
    comments.append("  {}".format(' '.join(command)))

    rules = mpl_cmap_to_rules(cmap,
                              n_colors=n_colors,
                              discrete=discrete,
                              comments=comments)
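    # mpl_cmap_to_rules is assumed to emit standard r.colors rules, one
    # "percentage R:G:B" entry per color plus "#"-prefixed comment lines,
    # e.g. (illustrative values only):
    #   0% 26:10:40
    #   50% 130:110:95
    #   100% 240:230:215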

    if options['map']:
        rcf = ''
        for char in 'gae':
            if flags[char]:
                rcf += char
        gscript.write_command(
            'r.colors',
            map=options['map'],
            flags=rcf,
            rules='-',
            stdin=rules,
        )
    if options['output']:
        with open(options['output'], 'w') as f:
            f.write(rules)
            f.write('\n')
    elif not options['map']:
        print(rules)
Exemplo n.º 38
0
def main(options, flags):
    config_name = options['configuration']
    params = owsConnections[config_name]
    
    output = options['output']
    betriebid = options['betriebid']
    basename = 'B' + betriebid + '_'
    task = options['task']
    maxsoilloss = options['maxsoilloss']

    params['username'] = options['username']
    params['password'] = options['password']
    params['dsn'] = params['dsn'] + ' user=%s password=%s' \
        %(params['username'],params['password'])
        
    flag_b = flags['b'] #use newly defined barriers
    flag_g = flags['g'] #don't set region according to parcel data
    flag_i = flags['i'] #force reimport of base data
    flag_n = flags['n'] #don't import anything ('offline')
    flag_c = flags['c'] #don't copy results to output raster map
    flag_s = flags['s'] #calculate statistics for results

    ## define own methods for Vect and Rast classes
    from grass.pygrass.vector import VectorTopo as Vect
    # some monkey patching with own methods
    #autoimport vector data from PostGIS
        
    def autoimport(self, layer, *args, **kwargs):
        if layer not in params['layers']:
            print('Layer <%s> not available/not configured on server.' % layer)
        vinogr(dsn = params['dsn'], snap = 0.01,
            layer=params['layers'][layer],
            output=self.name, **kwargs)
    Vect.autoimport = autoimport
    
    from grass.pygrass.raster import RasterAbstractBase as Rast
    #autoimport raster from WCS
    def autoimport(self, coverage, *args, **kwargs):
        if coverage not in params['coverages']:
            print('Coverage <%s> not available/not configured on server.' % coverage)
        r.in_wcs(url = params['url'], 
                username = params['username'], 
                password = params['password'],
                coverage=params['coverages'][coverage],
                output=self.name, **kwargs)
    Rast.autoimport = autoimport

    def setRegion(parcelmap,betriebid):
        ## set region to parcel layer extent + buffer
        reg = Region()
        reg.vect(parcelmap.name)
        regbuffer = 100
        reg.north += regbuffer
        reg.east += regbuffer
        reg.south -= regbuffer
        reg.west -= regbuffer
        reg.set_current()
        # set_current() not working right now
        # so using g.region() :
        g.region(n=str(reg.north), s=str(reg.south), w=str(reg.west), e=str(reg.east), res='2', flags='a',quiet=quiet)
        g.region(save='B'+betriebid,overwrite=True,quiet=quiet)
    
    def slopestats():
        slopemap = Rast(maps['elevation'].name + '.slope')
        r.slope_aspect(elevation=maps['elevation'].name, slope=slopemap.name, format='percent') 
        print('\n \n Statistics for slope <%s> (slope in %%): '%(slopemap.name))
        rsoillossstats(soilloss=slopemap.name, map=parcelmap.name, parcelnumcol='id')
        
    
    def sbare():
        rsoillossreclass.flags.u = True
        rsoillossreclass(maps['soillossbare'].name, 'soillossbare',flags='')
        
        if flag_s:
            print('\n \n Statistics for soilloss <%s> : '%(soillossbaremap.name))
            rsoillossstats(soilloss=soillossbaremap.name, 
                           map=parcelmap.name, parcelnumcol='id')
        if not flag_c:
            g.copy(rast=(soillossbaremap.name,output))
            gscript.message('Copy made to <%s> for automatic output' %(output))
    
    def sbareupdate():
        rsoillossupdate.inputs.map = parcelmap.name
        rsoillossupdate.inputs.factorold = maps['kfactor'].name
        rsoillossupdate.inputs.factorcol = 'kfactor'
        rsoillossupdate.flags.k = True
        rsoillossupdate.flags.p = True
        rsoillossupdate(soillossin=maps['soillossbare'].name, 
                        soillossout=soillossbarecorrmap.name)
        gscript.message('Soilloss for bare soil successfully updated to <%s> using parcelwise kfactor.' %(soillossbarecorrmap.name))
        if not flag_c:
            g.copy(rast=(soillossbarecorrmap.name,output))
            gscript.message('Copy made to <%s> for automatic output' %(output))
            
        rsoillossreclass(soillossbarecorrmap.name, 'soillossbare',flags='')
        gscript.message('Reclassified and colored maps found in <%s.3> and <%s.9> .'%(soillossbarecorrmap.name, soillossbarecorrmap.name))
        
        if flag_s:
            print('\n \n Statistics for soilloss on bare soil <%s> : ' % (soillossbarecorrmap.name))
            rsoillossstats(soilloss=soillossbarecorrmap.name, map=parcelmap.name, parcelnumcol='id')
      
    def sgrow():
        if soillossbarecorrmap.exist():
            rsoillossgrow.inputs.soillossbare = soillossbarecorrmap.name
        else: rsoillossgrow.inputs.soillossbare = soillossbaremap.name
        rsoillossgrow.inputs.map = parcelmap.name
        rsoillossgrow.inputs.factorcols = (params['colnames'][('cfactor')],)
        rsoillossgrow.inputs.factorcols += (params['colnames'][('pfactor')],)
        rsoillossgrow(soillossgrow=soillossgrowmap.name)
        gscript.message('Soilloss for grown soil successfully calculated to <%s> using parcelwise C and P factor.' % (soillossgrowmap.name))
                
        if not flag_c:
            g.copy(rast=(soillossgrowmap.name,output))
            gscript.message('Copy made to <%s> for automatic output' %(output))

        rsoillossreclass(soillossgrowmap.name, 'soillossgrow',flags='')
        gscript.message('Reclassified and colored maps found in <%s.3> and <%s.9> .'%(soillossgrowmap.name, soillossgrowmap.name))

        if flag_s:
            print('\n \n Statistics for soilloss on grown soil <%s> : ' % (soillossgrowmap.name))
            rsoillossstats(soilloss=soillossgrowmap.name, map=parcelmap.name, parcelnumcol='id')
                    
    def scpmax():
        if soillossbarecorrmap.exist():
            rsoillosscpmax.inputs.soillossbare = soillossbarecorrmap.name
        else: rsoillosscpmax.inputs.soillossbare = soillossbaremap.name
        
        rsoillosscpmax.inputs.maxsoilloss=maxsoilloss
        rsoillosscpmax(cpmax=soillosscpmaxmap.name)
        
        if not flag_c:
            g.copy(rast=(soillosscpmaxmap.name,output))
            gscript.message('Copy made to <%s> for automatic output' %(output))
        
        if flag_s:
            print('\n \n Statistics for <%s> : ' % (soillosscpmaxmap.name))
            rsoillossstats(soilloss=soillosscpmaxmap.name, map=parcelmap.name, parcelnumcol='id')
             
    def smeasure():
        gscript.message('Import <%s>' % measuremap.name)
        measuremap.autoimport('measures', overwrite=True, quiet=quiet,
                              where="betrieb_id = %s" % betriebid)
        
        soillossbaremap = maps['soillossbare']
        kfactormap = maps['kfactor']

        if soillossbarecorrmap.exist():
            gscript.message('Using updated soillossbare map.')
            soillossbaremap = soillossbarecorrmap
            kfactormap = Rast(parcelmap.name + '.kfactor')
        
        if flag_b:
            measurebarriermap = Vect(measuremap.name + '_barrier')
            v.extract(input=measuremap.name, where="barrier = 1",
                      output=measurebarriermap.name)
            
            measurefieldblockmap = Vect(measuremap.name + '_fieldblocks')
            v.overlay(ainput=maps['fieldblocks'].name,
                      binput=measurebarriermap.name,\
                      operator='not', 
                      output=measurefieldblockmap.name)
            
            rsoillossbare.inputs.elevation = maps['elevation'].name
            rsoillossbare.inputs.rfactor = maps['rfactor'].name
            rsoillossbare.inputs.kfactor = kfactormap.name
            rsoillossbare.inputs.map = measurefieldblockmap.name
            rsoillossbare.inputs.constant_m = '0.6'
            rsoillossbare.inputs.constant_n = '1.4'


            rsoillossbare.flags.r = True
            rsoillossbare(soillossbare=soillossbarebarriermap.name)
            soillossbaremap = soillossbarebarriermap

        parcelpfactor = parcelmap.name + '.pfactor'
        parcelcfactor = parcelmap.name + '.cfactor'
        v.to_rast(input=parcelmap.name, use='attr', attrcolumn='pfactor',
                  output=parcelpfactor)
        v.to_rast(input=parcelmap.name, use='attr', attrcolumn='cfactor',
                  output=parcelcfactor)
                  
        measurepfactor = measuremap.name + '.pfactor'
        measurecfactor = measuremap.name + '.cfactor'
        v.to_rast(input=measuremap.name, use='attr', attrcolumn='pfactor',
                  output=measurepfactor)
        v.to_rast(input=measuremap.name, use='attr', attrcolumn='cfactor',
                  output=measurecfactor)

        pfactor = parcelmap.name + '.pfactor.measure'
        cfactor = parcelmap.name + '.cfactor.measure'

        r.patch(input=(measurepfactor,parcelpfactor), output=pfactor)
        r.patch(input=(measurecfactor,parcelcfactor), output=cfactor)
        rsoillossgrow.inputs.soillossbare = soillossbaremap.name
        rsoillossgrow.inputs.cfactor = cfactor
        rsoillossgrow.inputs.pfactor = pfactor
        rsoillossgrow(soillossgrow=soillossmeasuremap.name)
        
        rsoillossreclass(soillossmeasuremap.name, 'soillossgrow',flags='')
        gscript.message('Reclassified and colored maps found in <%s.3> and <%s.9> .'%(soillossmeasuremap.name, soillossmeasuremap.name))

        if flag_s:
            gscript.message('\n \n Statistics for soilloss on grown soil <%s> : ' % (soillossmeasuremap.name))
            rsoillossstats(soilloss=soillossmeasuremap.name, map=parcelmap.name, parcelnumcol='id')
        
        if not flag_c:
            g.copy(rast=(soillossmeasuremap.name,output))
            gscript.message('Copy made to <%s> for automatic output' %(output))
    
#######################################################################
## BEGIN main controls
    curregion = Mapset()
    permanent = Mapset('PERMANENT')
    if curregion.name == permanent.name:
        gscript.fatal("Please change mapset. It can be dangerous to use this prealpha-module in PERMANENT")
            
    parcelmap = Vect(basename+'parcels')  
    if not flag_n:
        parcelmap.autoimport('parcels', overwrite=True, quiet=quiet,
                             where="betrieb_id = %s" % betriebid)
        #if parcelmap.popen.returncode <> 0:
        #   gscript.fatal('Import der Parzellendaten gescheitert.')
        
    if not flag_g: 
        setRegion(parcelmap,betriebid)
        gscript.verbose('Region set to parcels extent + 100 raster cells. \
            \n Resolution: raster cell = 2 x 2 meter.')
            
    basedata_rast = ('elevation','soillossbare','kfactor','rfactor')
    basedata_vect = ('fieldblocks',)
    
    maps = {}
    for map in (basedata_rast):
        mapname = basename + map
        maps[map] = Rast(mapname)
        
    for map in (basedata_vect):
        mapname = basename + map
        maps[map] = Vect(mapname)
      
    if not flag_n:
        vinogr.flags.r = True
        vinogr.inputs.where = ""

        for mapname in maps.keys():
            map = maps[mapname]
            if map.exist() and flag_i:
                map.remove()
            if not map.exist():
                map.autoimport(mapname)
                
    
    soillossbaremap = maps['soillossbare']

    soillossbarecorrmap = Rast(maps['soillossbare'].name + '.update')
    soillossgrowmap = Rast(basename + 'soillossgrow')
    soillosscpmaxmap = Rast(basename + 'cpmax')
    measuremap = Vect(basename + 'measures')
    soillossmeasuremap = Rast(basename + 'soillossgrow.measure')
    soillossbarebarriermap = Rast(basename + 'soillossbare.barrier')

    gscript.message('Import ok. Beginning task %s ...' % task)
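
    # Dispatch table: the single 'task' option selects which of the
    # processing functions defined above is run.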

    tasks = {'soilloss.bare': sbare,
             'soilloss.bare.update': sbareupdate,
             'soilloss.grow': sgrow,
             'soilloss.grow.measure': smeasure,
             'soilloss.cpmax': scpmax,
             'slope.stats': slopestats,
             }
    
    if task in tasks:
        tasks[task]()
    else:
        gscript.fatal('Please choose a valid task: %s' % ', '.join(tasks))
Exemplo n.º 39
0
    def _parse_mtd_file(mtd_file):
        # Lazy imports
        import io
        import numpy as np

        try:
            from xml.etree import ElementTree
            from datetime import datetime
        except ImportError as e:
            gs.fatal(_("Unable to parse metadata file. {}").format(e))

        meta = {}
        meta["timestamp"] = None
        with io.open(mtd_file, encoding="utf-8") as fd:
            root = ElementTree.fromstring(fd.read())
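            # root.tag has the form '{namespace-uri}TagName'; extract the URI
            # and register it as prefix 'n1' for the namespaced find() calls below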
            nsPrefix = root.tag[:root.tag.index("}") + 1]
            nsDict = {"n1": nsPrefix[1:-1]}
            node = root.find("n1:General_Info", nsDict)
            if node is not None:
                tile_id = (node.find("TILE_ID").text
                           if node.find("TILE_ID") is not None else "")
                if not tile_id.startswith("S2"):
                    gs.fatal(
                        _("Register file can be created only for Sentinel-2 data."
                          ))

                meta["SATELLITE"] = tile_id.split("_")[0]

                # get timestamp
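                # (SENSING_TIME is ISO 8601 with microseconds and a trailing
                # 'Z', e.g. '2020-06-01T10:30:21.024Z')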
                ts_str = node.find("SENSING_TIME", nsDict).text
                meta["timestamp"] = datetime.strptime(ts_str,
                                                      "%Y-%m-%dT%H:%M:%S.%fZ")

            # Get Quality metadata
            node = root.find("n1:Quality_Indicators_Info", nsDict)
            image_qi = node.find("Image_Content_QI")
            if image_qi:
                for qi in list(image_qi):
                    meta[qi.tag] = qi.text

            # Get Geometric metadata
            node = root.find("n1:Geometric_Info", nsDict)
            tile_angles = node.find("Tile_Angles")
            if tile_angles is not None:
                # In L1C products it can be necessary to compute angles from grid
                va_list = tile_angles.find("Mean_Viewing_Incidence_Angle_List")
                if va_list is not None:
                    for it in list(va_list):
                        band = it.attrib["bandId"]
                        for i in list(it):
                            if "ZENITH_ANGLE" in i.tag or "AZIMUTH_ANGLE" in i.tag:
                                meta[i.tag + "_" + band] = i.text
                    sa_grid = tile_angles.find("Sun_Angles_Grid")
                    if sa_grid is not None:
                        for ssn in list(sa_grid):
                            if ssn.tag == "Zenith":
                                for sssn in list(ssn):
                                    if sssn.tag == "Values_List":
                                        # average the full zenith grid; each
                                        # child row holds space-separated values
                                        mean_zenith = np.mean(
                                            np.array(
                                                [
                                                    np.array(
                                                        ssssn.text.split(" "),
                                                        dtype=float,
                                                    )
                                                    for ssssn in list(sssn)
                                                ],
                                                dtype=float,
                                            )
                                        )
                                        meta["MEAN_SUN_ZENITH_GRID_ANGLE"] = mean_zenith
                            elif ssn.tag == "Azimuth":
                                for sssn in list(ssn):
                                    if sssn.tag == "Values_List":
                                        mean_azimuth = np.mean(
                                            np.array(
                                                [
                                                    np.array(
                                                        ssssn.text.split(" "),
                                                        dtype=float,
                                                    )
                                                    for ssssn in list(sssn)
                                                ],
                                                dtype=float,
                                            )
                                        )
                                        meta["MEAN_SUN_AZIMUTH_GRID_ANGLE"] = mean_azimuth
                    sa_mean = tile_angles.find("Mean_Sun_Angle")
                    if sa_mean is not None:
                        for it in list(sa_mean):
                            if it.tag in ("ZENITH_ANGLE", "AZIMUTH_ANGLE"):
                                meta["MEAN_SUN_" + it.tag] = it.text
            else:
                gs.warning(
                    "Unable to extract tile angles from <{}>".format(mtd_file))
        if not meta["timestamp"]:
            gs.error(
                _("Unable to determine timestamp from <{}>").format(mtd_file))

        return meta
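
A minimal driver sketch for the parser above, assuming _parse_mtd_file is in scope; the metadata path and the messages are illustrative assumptions only (in the source module the function is presumably called internally on a tile's MTD_TL.xml).

# Hypothetical usage; all names below are examples, not part of the module
import grass.script as gs

mtd_path = "MTD_TL.xml"  # assumed path to a Sentinel-2 tile metadata file
meta = _parse_mtd_file(mtd_path)
if meta["timestamp"]:
    gs.message("Sensing time: {}".format(meta["timestamp"].isoformat()))
for key in ("MEAN_SUN_ZENITH_ANGLE", "MEAN_SUN_AZIMUTH_ANGLE"):
    if key in meta:
        gs.message("{}: {}".format(key, meta[key]))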