Beispiel #1
0
def main():

    # Pick the WMS backend implementation from the 'driver' option.
    if 'GRASS' in options['driver']:
        grass.debug("Using GRASS driver")
        from wms_drv import WMSDrv
        wms = WMSDrv()
    elif 'GDAL' in options['driver']:
        grass.debug("Using GDAL WMS driver")
        from wms_gdal_drv import WMSGdalDrv
        wms = WMSGdalDrv()

    if flags['c']:
        # capabilities-only mode: query the server and stop
        wms.GetCapabilities(options)
        return 0

    from wms_base import GRASSImporter
    options['region'] = GetRegionParams(options['region'])
    fetched_map = wms.GetMap(options, flags)

    grass.message(_("Importing raster map into GRASS..."))
    if not fetched_map:
        grass.warning(_("Nothing to import.\nNo data has been downloaded from wms server."))
        return
    GRASSImporter(options['output']).ImportMapIntoGRASS(fetched_map)

    return 0
Beispiel #2
0
    def _checkMatSet(self, mat_set):
        """!Validate a <TileMatrixSet> element, pruning invalid <TileMatrix> children.

        @return True when the set has an identifier, a supported CRS and
                at least one valid <TileMatrix>; False otherwise
        """
        identifier = mat_set.find(self.xml_ns.NsOws('Identifier'))
        if identifier is None or not identifier.text:
            return False

        supported_crs = mat_set.find(self.xml_ns.NsOws('SupportedCRS'))
        if supported_crs is None or not supported_crs.text:
            return False

        matrices = mat_set.findall(self.xml_ns.NsWmts('TileMatrix'))
        if not matrices:
            return False

        for matrix in matrices:
            if not self._checkMat(matrix):
                grass.debug('Removed invalid <TileMatrix> element.', 4)
                mat_set.remove(matrix)

        # at least one <TileMatrix> must have survived the pruning
        if not mat_set.findall(self.xml_ns.NsWmts('TileMatrix')):
            return False

        return True
Beispiel #3
0
    def _checkLayer(self, layer):
        """!Check <Layer> element.

        Validates mandatory children and prunes invalid <Style> and
        <TileMatrixSetLink> elements in place.

        @param layer - <Layer> element to check
        @return False when the layer lacks an identifier, matrix set
                links or styles; True otherwise
        """
        layer_id = layer.find(self.xml_ns.NsOws('Identifier'))
        if layer_id is None or not layer_id.text:
            return False

        mat_set_links = layer.findall(self.xml_ns.NsWmts('TileMatrixSetLink'))
        if not mat_set_links:
            return False

        styles = layer.findall(self.xml_ns.NsWmts('Style'))
        if not styles:
            return False

        for s in styles:
            s_name = s.find(self.xml_ns.NsOws('Identifier'))
            if s_name is None or not s_name.text:
                grass.debug('Removed invalid <Style> element.', 4)
                # remove the <Style> element itself; the previous code
                # removed s_name, which is a child of <Style>, not of
                # <Layer>, so Element.remove() raised ValueError
                layer.remove(s)

        contents = self.getroot().find(self.xml_ns.NsWmts('Contents'))
        mat_sets = contents.findall(self.xml_ns.NsWmts('TileMatrixSet'))

        for link in mat_set_links:
            # <TileMatrixSetLink> does not point to existing <TileMatrixSet>
            if not self._checkMatSetLink(link, mat_sets):
                grass.debug('Removed invalid <TileMatrixSetLink> element.', 4)
                layer.remove(link)

        return True
Beispiel #4
0
def list_layers():
    """Get list of available layers from WMS server"""
    qstring = "service=WMS&request=GetCapabilities&" + options['wmsquery']
    grass.debug("POST-data: %s" % qstring)
    
    # download capabilities file
    grass.verbose("List of layers for server <%s>:" % options['mapserver'])
    url = options['mapserver'] + '?' + qstring
    try:
        if options['cap_file']:
            cap_file, headers = urllib.urlretrieve(url, options['cap_file'])
        else:
            cap_file = urllib.urlopen(url, options['mapserver'] + '?' + qstring)
    except IOError:
        grass.fatal(_("Unable to get capabilities of '%s'") % options['mapserver'])
    
    # check DOCTYPE first
    if options['cap_file']:
        if headers['content-type'] != 'application/vnd.ogc.wms_xml':
            grass.fatal(_("Unable to get capabilities: %s") % url)
    else:
        if cap_file.info()['content-type'] != 'application/vnd.ogc.wms_xml':
            grass.fatal(_("Unable to get capabilities: %s") % url)

    # parse file with sax
    cap_xml = wms_parse.ProcessCapFile()
    try:
        xml.sax.parse(cap_file, cap_xml)
    except xml.sax.SAXParseException, err:
        grass.fatal(_("Reading capabilities failed. "
                      "Unable to parse XML document: %s") % err)
Beispiel #5
0
    def _createXML(self):
        """!Create XML for GDAL WCS driver

        @return path to XML file
        """
        self._debug("_createXML", "started")

        # root element of the GDAL WCS service description
        gdal_wcs = etree.Element("WCS_GDAL")

        server_url = etree.SubElement(gdal_wcs, "ServiceUrl")
        server_url.text = self.params['url']

        version = etree.SubElement(gdal_wcs, "Version")
        version.text = self.params['version']

        coverage = etree.SubElement(gdal_wcs, "CoverageName")
        coverage.text = self.params['coverage']

        if self.params['username']:
            # HTTP basic authentication credentials, "user:password"
            userpwd = etree.SubElement(gdal_wcs, 'UserPwd')
            userpwd.text = self.params['username'] + ':' + self.params['password']

        xml_file = self._tempfile()

        # build the tree once and reuse it for both debugging and writing
        # (previously a second identical ElementTree was constructed)
        etree_gdal_wcs = etree.ElementTree(gdal_wcs)
        gscript.debug(etree_gdal_wcs)
        etree_gdal_wcs.write(xml_file)

        self._debug("_createXML", "finished -> %s" % xml_file)

        return xml_file
Beispiel #6
0
def main():
    """Fetch a coverage from a WCS server and import it into GRASS."""
    flag_c = flags['c']
    flag_e = flags['e']

    # right now only supported version, therefore not in GUI
    options['version'] = "1.0.0"

    gscript.debug("Using GDAL WCS driver")
    wcs = WCSGdalDrv()  # only supported driver

    if flag_c:
        # just print the server capabilities
        wcs.GetCapabilities(options, flags)

    elif flag_e:
        # link the coverage as an external (GDAL) raster
        external_map = wcs.LinkMap(options, flags)

    else:
        gscript.message("Importing raster map into GRASS...")
        fetched_map = wcs.GetMap(options, flags)
        if not fetched_map:
            # fixed garbled message ("Data not has been downloaded")
            gscript.warning(_("Nothing imported.\n"
                              "No data has been downloaded from wcs server."))
            return 1

    return 0
Beispiel #7
0
    def _checkLayer(self, layer):
        """!Check <TiledGroup>/<TiledGroups> elements.

        Prunes <TilePattern> children whose URLs are all invalid.

        @return False when a <TiledGroup> lacks a name or any valid
                <TilePattern>; True otherwise
        """
        if layer.tag == 'TiledGroups':
            return True

        name = layer.find('Name')
        if name is None or not name.text:
            return False

        t_patts = layer.findall('TilePattern')

        for patt in t_patts:
            # keep only URLs the server actually answers; building a new
            # list avoids the previous remove-while-iterating bug, which
            # skipped the entry following each removed URL
            valid_urls = [url for url in self._getUrls(patt)
                          if self.gettilepatternurldata(url)]

            # check if there are any valid urls
            if not valid_urls:
                grass.debug('<TilePattern>  was removed. It has no valid url.', 4)
                layer.remove(patt)
            else:
                patt.text = '\n'.join(valid_urls)

        t_patts = layer.findall('TilePattern')
        if not t_patts:
            return False

        return True
Beispiel #8
0
    def _checkMatSetLink(self, link, mat_sets):
        """!Check <TileMatrixSetLink> element.

        @param link - <TileMatrixSetLink> element
        @param mat_sets - list of known <TileMatrixSet> elements
        @return True when the link points to an existing <TileMatrixSet>,
                False otherwise; invalid limits are pruned as a side effect
        """
        mat_set_link_id = link.find(self.xml_ns.NsWmts('TileMatrixSet')).text
        found = False

        for mat_set in mat_sets:
            mat_set_id = mat_set.find(self.xml_ns.NsOws('Identifier')).text

            if mat_set_id != mat_set_link_id:
                continue

            # the link points to existing <TileMatrixSet>
            found = True

            tile_mat_set_limits = link.find(self.xml_ns.NsWmts('TileMatrixSetLimits'))
            if tile_mat_set_limits is None:
                continue

            tile_mat_limits = tile_mat_set_limits.findall(self.xml_ns.NsWmts('TileMatrixLimits'))
            for limit in tile_mat_limits:
                if not self._checkMatSetLimit(limit):
                    grass.debug('Removed invalid <TileMatrixLimits> element.', 4)
                    # remove from the XML tree; the previous code removed
                    # from the Python list returned by findall(), leaving
                    # the invalid element in the document
                    tile_mat_set_limits.remove(limit)

            # are there any <TileMatrixLimits> elements after the check
            tile_mat_limits = tile_mat_set_limits.findall(self.xml_ns.NsWmts('TileMatrixLimits'))
            if not tile_mat_limits:
                grass.debug('Removed invalid <TileMatrixSetLimits> element.', 4)
                link.remove(tile_mat_set_limits)

        if not found:
            return False

        return True
def get_percentile_mp(map, percentiles, conn):
    # Worker for multiprocessing.Process: compute percentiles for one
    # map and push the result back through the pipe.  Process() passes
    # the pipe as an (output, input) tuple, so we keep the end we need
    # and close the other immediately.
    out_end, in_end = conn
    in_end.close()
    result = get_percentile(map, percentiles)
    grass.debug('child (%s) (%.1f, %.1f)' % (map, result[0], result[1]))
    out_end.send(result)
    out_end.close()
def main():
    # Select the WMS driver class, then instantiate it.
    if flags['d']:
        grass.debug("Using own driver")
        from wms_drv import WMSDrv
        driver_cls = WMSDrv
    else:
        grass.debug("Using GDAL WMS driver")
        from wms_gdal_drv import WMSGdalDrv
        driver_cls = WMSGdalDrv

    wms = driver_cls(options, flags)

    return 0
Beispiel #11
0
    def __init__(self, cap_file):
        """!Parse NASA OnEarth tile service file.

        Raises xml.etree.ElementTree.ParseError when the file cannot be
        parsed.  Elements which are malformed but needed by the wxGUI
        capabilities dialog, or by the GRASS WMS library for building a
        GetMap request, are removed from the tree.

        @param cap_file - capabilities file
        """
        BaseCapabilitiesTree.__init__(self, cap_file)

        grass.debug('Checking OnEarth capabilities tree.', 4)
        self._checkLayerTree(self.getroot())
        grass.debug('Check if OnEarth capabilities tree was finished.', 4)
Beispiel #12
0
    def _checkLayerTree(self, parent_layer, first=True):
        """!Recursively check layer tree.

        @param parent_layer - element whose tiled groups are validated
        @param first - True on the initial call, which first descends
                       into the <TiledPatterns> element
        """
        if first:
            parent_layer = self._find(parent_layer, 'TiledPatterns')

        groups = (parent_layer.findall('TiledGroup') +
                  parent_layer.findall('TiledGroups'))

        for group in groups:
            if not self._checkLayer(group):
                grass.debug(('Removed invalid <%s> element.' % group.tag), 4)
                parent_layer.remove(group)
            # nested groups need the same validation
            if group.tag == 'TiledGroups':
                self._checkLayerTree(group, False)
Beispiel #13
0
    def _fetchCapabilities(self, options):
        """!Download capabilities from WMS server

        @param options - module options ('url', 'driver', 'urlparams', ...)
        @return downloaded capabilities data
        """
        cap_url = options['url'].strip()
        cap_url += "&" if "?" in cap_url else "?"

        driver = options['driver']
        if 'WMTS' in driver:
            cap_url += "SERVICE=WMTS&REQUEST=GetCapabilities&VERSION=1.0.0"
        elif 'OnEarth' in driver:
            cap_url += "REQUEST=GetTileService"
        else:
            cap_url += "SERVICE=WMS&REQUEST=GetCapabilities&VERSION=" + options['wms_version']

        if options['urlparams']:
            cap_url += "&" + options['urlparams']

        grass.debug('Fetching capabilities file.\n%s' % cap_url)

        try:
            cap = self._fetchDataFromServer(cap_url, options['username'], options['password'])
        except (IOError, HTTPException) as e:
            # 401 means bad credentials; everything else is reported with
            # the server's reason when available
            if isinstance(e, HTTPError) and e.code == 401:
                grass.fatal(
                    _("Authorization failed to <%s> when fetching capabilities") %
                    options['url'])
            else:
                msg = _("Unable to fetch capabilities from <{}>. Reason: ").format(
                    options['url'])
                msg += '{}'.format(e.reason if hasattr(e, 'reason') else e)
                grass.fatal(msg)

        grass.debug('Fetching capabilities OK')
        return cap
Beispiel #14
0
def main():
    """Add one or more columns to the attribute table of a vector map."""
    map = options['map']
    layer = options['layer']
    columns = [col.strip() for col in options['columns'].split(',')]

    # the map must live in the CURRENT mapset
    mapset = grass.gisenv()['MAPSET']
    if not grass.find_file(map, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    try:
        f = grass.vector_db(map)[int(layer)]
    except KeyError:
        grass.fatal(
            _("There is no table connected to this map. Run v.db.connect or v.db.addtable first."))

    table = f['table']
    database = f['database']
    driver = f['driver']
    column_existing = grass.vector_columns(map, int(layer)).keys()

    for col in columns:
        if not col:
            grass.fatal(_("There is an empty column. Did you leave a trailing comma?"))
        col_name = col.split(' ')[0].strip()
        if col_name in column_existing:
            grass.error(_("Column <%s> is already in the table. Skipping.") % col_name)
            continue
        grass.verbose(_("Adding column <%s> to the table") % col_name)
        stmt = "ALTER TABLE %s ADD COLUMN %s" % (table, col)
        p = grass.feed_command('db.execute', input='-', database=database, driver=driver)
        p.stdin.write(stmt)
        grass.debug(stmt)
        p.stdin.close()
        if p.wait() != 0:
            grass.fatal(_("Unable to add column <%s>.") % col)

    # write cmd history:
    grass.vector_history(map)
Beispiel #15
0
    def LinkMap(self, options, flags):
        """!Link data from WCS server as an external (GDAL) raster map.

        @param options - module options
        @param flags - module flags
        @return output map name
        """
        self._debug("LinkMap", "started")

        self._initializeParameters(options, flags)
        self.xml_file = self._createXML()

        # dump the generated service description for debugging; use a
        # context manager so the file handle is not leaked
        with open(self.xml_file, "r") as fin:
            gscript.debug(fin.readlines())

        # the previous failure check referenced an undefined variable 'p'
        # and raised NameError unconditionally; r.external is expected to
        # report/raise on failure itself -- TODO confirm
        r.external(input=self.xml_file, output=self.params['output'])
        gscript.try_remove(self.xml_file)

        return self.params['output']
Beispiel #16
0
    def _initLayer(self, layer, parent_layer):
        """Inherit elements from parent layer

        @param layer - <Layer> element which inherits
        @param parent_layer - <Layer> element which is inherited from
        """
        if parent_layer is not None:
            # 'replace' elements are inherited only when the child layer
            # does not define them itself; 'add' elements accumulate
            replaced_elements = [["EX_GeographicBoundingBox", "replace"],
                                 ["Attribution", "replace"],
                                 ["MinScaleDenominator", "replace"],
                                 ["MaxScaleDenominator", "replace"],
                                 ["AuthorityURL", "add"]]

            for element in replaced_elements:
                elems = layer.findall(self.xml_ns.Ns(element[0]))

                # inherit when missing ('replace') or always ('add');
                # the previous '!= 0' test inverted the 'replace'
                # semantics, duplicating defined elements and never
                # inheriting missing ones
                if len(elems) == 0 or element[1] == "add":
                    for e in parent_layer.findall(self.xml_ns.Ns(element[0])):
                        layer.append(e)

            # attributes inherited when the child does not set them
            inh_arguments = ["queryable", "cascaded", "opaque",
                             "noSubsets", "fixedWidth", "fixedHeight"]

            for attr in parent_layer.attrib:
                if attr not in layer.attrib and attr in inh_arguments:
                    layer.attrib[attr] = parent_layer.attrib[attr]

            self._inhNotSame(self.proj_tag, "element_content", layer, parent_layer)
            self._inhNotSame("BoundingBox", "attribute", layer, parent_layer, self.proj_tag)

            # remove invalid Styles
            styles = layer.findall(self.xml_ns.Ns('Style'))
            for s in styles:
                s_name = s.find(self.xml_ns.Ns('Name'))
                if s_name is None or not s_name.text:
                    grass.debug('Removed invalid <Style> element.', 4)
                    layer.remove(s)

            self._inhNotSame("Style", "child_element_content", layer, parent_layer, "Name")
            self._inhNotSame("Dimension", "attribute", layer, parent_layer, "name")
def main():
    """Validate inputs, configure the region, and compute the noise map."""
    # process command options
    input = options['input']
    if not gs.find_file(input)['file']:
        gs.fatal(_("Raster map <%s> not found") % input)

    output = options['output']
    if gs.find_file(output)['file'] and not gs.overwrite():
        gs.fatal(_("Output map <%s> already exists") % output)

    # work in a scratch region so the user's region stays intact
    gs.use_temp_region()

    # optionally restrict processing to a sub-region
    region = options.get('region')
    if region:
        if not gs.find_file(region)['file']:
            gs.fatal(_("Raster map <%s> not found") % region)
        gs.message("Setting region to %s" % region, flag='i')
        gs.run_command('g.region', rast=region, align=input)
    else:
        gs.message("Using existing GRASS region", flag='i')

    separator = '=' * 50
    gs.debug(separator)
    gs.debug('\n'.join(gs.parse_command('g.region', 'p').keys()))
    gs.debug(separator)

    calculate_noise(input, output)

    # restore original region
    gs.del_temp_region()

    return None
Beispiel #18
0
    def __init__(self, cap_file, force_version=None):
        """!Parses WMS capabilities file.

        Raises xml.etree.ElementTree.ParseError when the capabilities
        file cannot be parsed.

        The class manages inheritance in 'Layer' elements (inherited
        elements are added to the 'Layer' element) and removes elements
        which are malformed but needed by the wxGUI capabilities dialog.

        @param cap_file - capabilities file
        @param force_version - force capabilities file version (1.1.1, 1.3.0)
        """
        BaseCapabilitiesTree.__init__(self, cap_file)
        self.xml_ns = WMSXMLNsHandler(self)

        grass.debug('Checking WMS capabilities tree.', 4)

        root_attrib = self.getroot().attrib
        if "version" not in root_attrib:
            raise ParseError(_("Missing version attribute root node "
                               "in Capabilities XML file"))
        wms_version = root_attrib["version"]

        # WMS 1.3.0 renamed the projection element from SRS to CRS
        self.proj_tag = "CRS" if wms_version == "1.3.0" else "SRS"

        if force_version is not None and wms_version != force_version:
            raise ParseError(_("WMS server does not support '%s' version.") % wms_version)

        capability = self._find(self.getroot(), "Capability")
        root_layer = self._find(capability, "Layer")

        self._checkFormats(capability)
        self._checkLayerTree(root_layer)

        grass.debug('Check of WMS capabilities tree was finished.', 4)
Beispiel #19
0
def import_aster(proj, srcfile, tempfile, band):
    """Warp an ASTER image to the target projection and import it.

    Runs gdalwarp (must be in $PATH) to translate the image to GeoTIFF,
    then loads the result with r.in.gdal.
    """
    grass.message(_("Georeferencing aster image ..."))
    grass.debug("gdalwarp -t_srs %s %s %s" % (proj, srcfile, tempfile))

    cmd = ["gdalwarp", "-t_srs", proj, srcfile, tempfile]
    if platform.system() == "Darwin":
        # run the 32-bit binary explicitly on OS X
        cmd = ["arch", "-i386"] + cmd

    if grass.call(cmd) != 0:
        # gdalwarp did not execute properly; nothing to import
        return

    # import geotiff to GRASS
    grass.message(_("Importing into GRASS ..."))
    outfile = "%s.%s" % (output, band)
    grass.run_command("r.in.gdal", input=tempfile, output=outfile)

    # write cmd history
    grass.raster_history(outfile)
Beispiel #20
0
    def _fetchCapabilities(self, options):
        """!Download capabilities from WMS server

        @param options - module options ('url', 'driver', 'urlparams', ...)
        @return downloaded capabilities data
        """
        cap_url = options["url"].strip()

        if "?" in cap_url:
            cap_url += "&"
        else:
            cap_url += "?"

        if "WMTS" in options["driver"]:
            cap_url += "SERVICE=WMTS&REQUEST=GetCapabilities&VERSION=1.0.0"
        elif "OnEarth" in options["driver"]:
            cap_url += "REQUEST=GetTileService"
        else:
            cap_url += "SERVICE=WMS&REQUEST=GetCapabilities&VERSION=" + options["wms_version"]

        if options["urlparams"]:
            cap_url += "&" + options["urlparams"]

        grass.debug("Fetching capabilities file.\n%s" % cap_url)

        try:
            cap = self._fetchDataFromServer(cap_url, options["username"], options["password"])
        except (IOError, HTTPException) as e:
            # isinstance (not exact type comparison) so subclasses of
            # HTTPError are also recognized as authorization failures
            if isinstance(e, urllib2.HTTPError) and e.code == 401:
                grass.fatal(_("Authorization failed to <%s> when fetching capabilities") % options["url"])
            else:
                msg = _("Unable to fetch capabilities from <%s>: %s") % (options["url"], e)

                if hasattr(e, "reason"):
                    msg += _("\nReason: ") + e.reason

                grass.fatal(msg)

        grass.debug("Fetching capabilities OK")
        return cap
Beispiel #21
0
def main():
    """Compute tasseled cap components for the given satellite bands."""
    options, flags = grass.parser()
    satellite = options['sensor']
    output_basename = options['output']
    inputs = options['input'].split(',')

    # each sensor expects a fixed number of input bands
    num_of_bands = used_bands[satellites.index(satellite)]
    if len(inputs) != num_of_bands:
        grass.fatal(_("The number of input raster maps (bands) should be %s") % num_of_bands)

    # map the band rasters onto the in<N>band computation parameters
    bands = {}
    for i, band in enumerate(inputs):
        bands['in' + str(i + 1) + 'band'] = band
    # debug(msg, debug_level): the arguments were swapped before
    grass.debug(str(bands), 1)

    # core tasseled cap components computation
    calcN(output_basename, bands, satellite)

    # assign "Data Description" field in all four component maps
    for i, comp in enumerate(names):
        grass.run_command('r.support', map="%s.%d" % (output_basename, i + 1),
                          description="Tasseled Cap %d: %s" % (i + 1, comp))

    grass.message(_("Tasseled Cap components calculated"))
Beispiel #22
0
def main():
    """Stretch grey color tables of RGB bands based on percentiles."""
    red = options["red"]
    green = options["green"]
    blue = options["blue"]
    brightness = options["strength"]
    full = flags["f"]
    preserve = flags["p"]
    reset = flags["r"]

    # 90 or 98? MAX value controls brightness
    # think of percent (0-100), must be positive or 0
    # must be more than "2" ?

    if full:
        for i in [red, green, blue]:
            grass.run_command("r.colors", map=i, color="grey")
        sys.exit(0)

    if reset:
        for i in [red, green, blue]:
            grass.run_command("r.colors", map=i, color="grey255")
        sys.exit(0)

    if not preserve:
        # stretch each band independently
        for i in [red, green, blue]:
            grass.message(_("Processing <%s>...") % i)
            v0 = get_percentile(i, 2)
            v1 = get_percentile(i, brightness)
            grass.debug("<%s>:  min=%f   max=%f" % (i, v0, v1))
            set_colors(i, v0, v1)
    else:
        # preserve color balance: stretch all bands by the common range
        all_max = 0
        all_min = 999999
        for i in [red, green, blue]:
            grass.message(_("Processing <%s>...") % i)
            v0 = get_percentile(i, 2)
            v1 = get_percentile(i, brightness)
            grass.debug("<%s>:  min=%f   max=%f" % (i, v0, v1))
            all_min = min(all_min, v0)
            all_max = max(all_max, v1)
        grass.debug("all_min=%f   all_max=%f" % (all_min, all_max))
        for i in [red, green, blue]:
            # use the common range; the previous code passed v0/v1 from
            # the last band only, defeating color-balance preservation
            set_colors(i, all_min, all_max)

    # write cmd history:
    mapset = grass.gisenv()["MAPSET"]
    for i in [red, green, blue]:
        if grass.find_file(i)["mapset"] == mapset:
            grass.raster_history(i)
Beispiel #23
0
    def __init__(self, cap_file):
        """!Parses WMTS capabilities file.

        Raises xml.etree.ElementTree.ParseError when the capabilities
        file cannot be parsed.

        Elements which are malformed but needed by the wxGUI
        capabilities dialog, or by the GRASS WMS library for creating a
        GetTile request, are removed from the tree.

        @param cap_file - capabilities file
        """
        BaseCapabilitiesTree.__init__(self, cap_file)
        self.xml_ns = WMTSXMLNsHandler()

        grass.debug('Checking WMTS capabilities tree.', 4)

        contents = self._find(self.getroot(), 'Contents', self.xml_ns.NsWmts)

        for mat_set in self._findall(contents, 'TileMatrixSet', self.xml_ns.NsWmts):
            if not self._checkMatSet(mat_set):
                grass.debug('Removed invalid <TileMatrixSet> element.', 4)
                contents.remove(mat_set)

        # are there any <TileMatrixSet> elements after the check
        # (result unused here; presumably _findall reports when none
        # remain -- TODO confirm)
        self._findall(contents, 'TileMatrixSet', self.xml_ns.NsWmts)

        for lyr in self._findall(contents, 'Layer', self.xml_ns.NsWmts):
            if not self._checkLayer(lyr):
                grass.debug('Removed invalid <Layer> element.', 4)
                contents.remove(lyr)

        # are there any <Layer> elements after the check
        self._findall(contents, 'Layer', self.xml_ns.NsWmts)

        grass.debug('Check of WMTS capabilities tree was finished.', 4)
def main():
    """Validate inputs, configure the region, and run the smoothing."""
    # process command options
    input = options['input']
    if not gs.find_file(input)['file']:
        gs.fatal(_("Raster map <%s> not found") % input)

    smooth = options['output']
    if gs.find_file(smooth)['file'] and not gs.overwrite():
        gs.fatal(_("Output map <%s> already exists") % smooth)

    # sd is either a constant or the name of a raster map
    sd = options['sd']
    try:
        sd = float(sd)
    except ValueError:
        if not gs.find_file(sd)['file']:
            gs.fatal(_("Raster map <%s> not found") % sd)

    alpha = float(options['alpha'])

    # work in a scratch region so the user's region stays intact
    gs.use_temp_region()

    # optionally restrict processing to a sub-region
    region = options.get('region')
    if region:
        if not gs.find_file(region)['file']:
            gs.fatal(_("Raster map <%s> not found") % region)
        gs.message("Setting region to %s" % region, flag='i')
        gs.run_command('g.region', rast=region, align=input)
    else:
        gs.message("Using existing GRASS region", flag='i')

    separator = '=' * 50
    gs.debug(separator)
    gs.debug('\n'.join(gs.parse_command('g.region', 'p').keys()))
    gs.debug(separator)

    multiscalesmooth(input, smooth, sd, alpha)

    # restore original region
    gs.del_temp_region()

    return None
Beispiel #25
0
def main():
    """Join columns from another table into an attribute table."""
    table = options["table"]
    column = options["column"]
    otable = options["other_table"]
    ocolumn = options["other_column"]
    if options["subset_columns"]:
        scolumns = options["subset_columns"].split(",")
    else:
        scolumns = None

    database = options["database"]
    driver = options["driver"]

    # this error handling is completely different among th db.* scripts - FIX
    if not database:
        database = None
    if not driver:
        driver = None

    if driver == "dbf":
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    # describe input table
    all_cols_tt = grass.db_describe(table, driver=driver,
                                    database=database)["cols"]
    if not all_cols_tt:
        grass.fatal(_("Unable to describe table <%s>") % table)
    found = False

    # check if column is in input table
    if column not in [col[0] for col in all_cols_tt]:
        grass.fatal(_("Column <%s> not found in table <%s>") % (column, table))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver=driver,
                                    database=database)["cols"]

    # check if ocolumn is in other table
    if ocolumn not in [ocol[0] for ocol in all_cols_ot]:
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine columns subset from other table
    if not scolumns:
        # select all columns from other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        # check if scolumns exists in the other table
        for scol in scolumns:
            found = False
            for col_ot in all_cols_ot:
                if scol == col_ot[0]:
                    found = True
                    cols_to_add.append(col_ot)
                    break
            if not found:
                grass.warning(
                    _("Column <%s> not found in table <%s>") % (scol, otable))

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    # column NAMES of the input table; db_describe() returns [name, type]
    # pairs, so testing 'colname not in all_cols_tt' (as before) never
    # matched and existing columns were re-added via ALTER TABLE
    existing_col_names = [col[0] for col in all_cols_tt]

    for col in cols_to_add:
        # skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue

        use_len = False
        if len(col) > 2:
            use_len = True
            # Sqlite 3 does not support the precision number any more
            if driver == "sqlite":
                use_len = False
            # MySQL - expect format DOUBLE PRECISION(M,D), see #2792
            elif driver == "mysql" and col[1] == "DOUBLE PRECISION":
                use_len = False

        if use_len:
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table
        if colname not in existing_col_names:
            p = grass.feed_command("db.execute",
                                   input="-",
                                   database=database,
                                   driver=driver)
            p.stdin.write("ALTER TABLE %s ADD COLUMN %s" % (table, colspec))
            grass.debug("ALTER TABLE %s ADD COLUMN %s" % (table, colspec))
            p.stdin.close()
            if p.wait() != 0:
                grass.fatal(_("Unable to add column <%s>.") % colname)

        stmt = template.substitute(table=table,
                                   column=column,
                                   otable=otable,
                                   ocolumn=ocolumn,
                                   colname=colname)
        grass.debug(stmt, 1)
        grass.verbose(
            _("Updating column <%s> of table <%s>...") % (colname, table))
        try:
            grass.write_command("db.execute",
                                stdin=stmt,
                                input="-",
                                database=database,
                                driver=driver)
        except CalledModuleError:
            grass.fatal(_("Error filling column <%s>") % colname)

    return 0
Beispiel #26
0
def main():
    """Join columns of table <otable> into the attribute table of <map>.

    Reads the module's 'options' dictionary (map, layer, column, otable,
    ocolumn, scolumns), adds any missing columns to the vector map's table
    and fills them with an UPDATE ... (SELECT ...) statement per column.

    Returns 0 on success; fatal errors terminate via grass.fatal().
    """
    map = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['otable']
    ocolumn = options['ocolumn']
    if options['scolumns']:
        scolumns = options['scolumns'].split(',')
    else:
        scolumns = None

    f = grass.vector_layer_db(map, layer)

    maptable = f['table']
    database = f['database']
    driver = f['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(_("There is no table connected to this map. Unable to join any column."))

    # check if column is in map table
    # (dict.has_key() was removed in Python 3; use the 'in' operator)
    if column not in grass.vector_columns(map, layer):
        grass.fatal(_("Column <%s> not found in table <%s>") % (column, maptable))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver=driver, database=database)['cols']

    # check if ocolumn is on other table
    if ocolumn not in [ocol[0] for ocol in all_cols_ot]:
        grass.fatal(_("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine columns subset from other table
    if not scolumns:
        # select all columns from other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        # check if scolumns exists in the other table
        for scol in scolumns:
            found = False
            for col_ot in all_cols_ot:
                if scol == col_ot[0]:
                    found = True
                    cols_to_add.append(col_ot)
                    break
            if not found:
                grass.warning(_("Column <%s> not found in table <%s>.") % (scol, otable))

    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in cols_to_add:
        # skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue
        # Sqlite 3 does not support the precision number any more
        if len(col) > 2 and driver != "sqlite":
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table;
        # grass.script raises CalledModuleError on failure (a zero return
        # check would never trigger), consistent with error handling used
        # elsewhere in this file
        if colname not in all_cols_tt:
            try:
                grass.run_command('v.db.addcolumn', map=map,
                                  columns=colspec, layer=layer)
            except CalledModuleError:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table=maptable, column=column,
                                   otable=otable, ocolumn=ocolumn,
                                   colname=colname)
        grass.debug(stmt, 1)
        grass.verbose(_("Updating column <%s> of vector map <%s>...") % (colname, map))
        try:
            grass.write_command('db.execute', stdin=stmt, input='-',
                                database=database, driver=driver)
        except CalledModuleError:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(map)

    return 0
Beispiel #27
0
    def _download(self):
        """!Downloads data from WMS server using own driver

        Fetches all tiles covering the requested region, merges them into
        a single GDAL raster and georeferences it.

        @return temp_map path with downloaded data, or None if no tile was
                fetched
        """
        grass.message(_("Downloading data from WMS server..."))

        if "?" in self.params["url"]:
            self.params["url"] += "&"
        else:
            self.params["url"] += "?"

        if not self.params['capfile']:
            self.cap_file = self._fetchCapabilities(self.params)
        else:
            self.cap_file = self.params['capfile']

        # initialize correct manager according to chosen OGC service
        if self.params['driver'] == 'WMTS_GRASS':
            req_mgr = WMTSRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.proj_srs,
                self.cap_file)
        elif self.params['driver'] == 'WMS_GRASS':
            req_mgr = WMSRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.tile_size,
                self.proj_srs)
        elif self.params['driver'] == 'OnEarth_GRASS':
            req_mgr = OnEarthRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.proj_srs,
                self.cap_file)

        # get information about size in pixels and bounding box of raster, where
        # all tiles will be joined
        map_region = req_mgr.GetMapRegion()

        init = True
        temp_map = None

        fetch_try = 0

        # iterate through all tiles and download them
        while True:

            if fetch_try == 0:
                # get url for request the tile and information for placing the tile into
                # raster with other tiles
                tile = req_mgr.GetNextTile()

            # if last tile has been already downloaded
            if not tile:
                break

            # url for request the tile
            query_url = tile[0]

            # the tile size and offset in pixels for placing it into raster where tiles are joined
            tile_ref = tile[1]
            grass.debug(query_url, 2)
            try:
                wms_data = self._fetchDataFromServer(
                    query_url, self.params['username'],
                    self.params['password'])
            except (IOError, HTTPException) as e:
                if isinstance(e, HTTPError) and e.code == 401:
                    grass.fatal(
                        _("Authorization failed to '%s' when fetching data.\n%s") %
                        (self.params['url'], str(e)))
                else:
                    grass.fatal(
                        _("Unable to fetch data from: '%s'\n%s") %
                        (self.params['url'], str(e)))

            temp_tile = self._tempfile()

            # download data into temporary file
            try:
                temp_tile_opened = open(temp_tile, 'wb')
                temp_tile_opened.write(wms_data.read())
            except IOError as e:
                # some servers are not happy with many subsequent requests for tiles done immediately,
                # if immediate request was unsuccessful, try to repeat the request after 5s and 30s breaks
                # TODO probably servers can return more kinds of errors related to this
                # problem (not only 104)
                # Use e.errno: on Python 3 socket.error is OSError, which is
                # not subscriptable, so the former e[0] would raise TypeError.
                if isinstance(e, socket.error) and e.errno == 104 and fetch_try < 2:
                    fetch_try += 1

                    if fetch_try == 1:
                        sleep_time = 5
                    elif fetch_try == 2:
                        sleep_time = 30

                    grass.warning(
                        _("Server refused to send data for a tile.\nRequest will be repeated after %d s.") %
                        sleep_time)

                    sleep(sleep_time)
                    continue
                else:
                    grass.fatal(_("Unable to write data into tempfile.\n%s") % str(e))
            finally:
                temp_tile_opened.close()

            fetch_try = 0

            tile_dataset_info = gdal.Open(temp_tile, gdal.GA_ReadOnly)
            if tile_dataset_info is None:
                # print error xml returned from server
                try:
                    error_xml_opened = open(temp_tile, 'rb')
                    err_str = error_xml_opened.read()
                except IOError as e:
                    grass.fatal(_("Unable to read data from tempfile.\n%s") % str(e))
                finally:
                    error_xml_opened.close()

                if err_str is not None:
                    grass.fatal(_("WMS server error: %s") % err_str)
                else:
                    grass.fatal(_("WMS server unknown error"))

            temp_tile_pct2rgb = None
            if tile_dataset_info.RasterCount == 1 and \
               tile_dataset_info.GetRasterBand(1).GetRasterColorTable() is not None:
                # expansion of color table into bands
                temp_tile_pct2rgb = self._tempfile()
                tile_dataset = self._pct2rgb(temp_tile, temp_tile_pct2rgb)
            else:
                tile_dataset = tile_dataset_info

            # initialization of temp_map_dataset, where all tiles are merged
            if init:
                temp_map = self._tempfile()

                driver = gdal.GetDriverByName(self.gdal_drv_format)
                metadata = driver.GetMetadata()
                if gdal.DCAP_CREATE not in metadata or \
                        metadata[gdal.DCAP_CREATE] == 'NO':
                    # was 'drv_format', an undefined name (NameError) --
                    # the format string is stored on self
                    grass.fatal(_('Driver %s does not supports Create() method') % self.gdal_drv_format)
                self.temp_map_bands_num = tile_dataset.RasterCount
                temp_map_dataset = driver.Create(temp_map, map_region['cols'], map_region['rows'],
                                                 self.temp_map_bands_num,
                                                 tile_dataset.GetRasterBand(1).DataType)
                init = False

            # tile is written into temp_map
            tile_to_temp_map = tile_dataset.ReadRaster(0, 0, tile_ref['sizeX'], tile_ref['sizeY'],
                                                       tile_ref['sizeX'], tile_ref['sizeY'])

            temp_map_dataset.WriteRaster(tile_ref['t_cols_offset'], tile_ref['t_rows_offset'],
                                         tile_ref['sizeX'], tile_ref['sizeY'], tile_to_temp_map)

            tile_dataset = None
            tile_dataset_info = None
            grass.try_remove(temp_tile)
            grass.try_remove(temp_tile_pct2rgb)

        if not temp_map:
            return temp_map
        # georeferencing and setting projection of temp_map
        projection = grass.read_command('g.proj',
                                        flags='wf',
                                        epsg=self.params['srs'])
        projection = projection.rstrip('\n')
        temp_map_dataset.SetProjection(grass.encode(projection))

        pixel_x_length = (map_region['maxx'] - map_region['minx']) / int(map_region['cols'])
        # negative: raster rows grow southwards from the top-left origin
        pixel_y_length = (map_region['miny'] - map_region['maxy']) / int(map_region['rows'])

        geo_transform = [
            map_region['minx'],
            pixel_x_length,
            0.0,
            map_region['maxy'],
            0.0,
            pixel_y_length]
        temp_map_dataset.SetGeoTransform(geo_transform)
        temp_map_dataset = None

        return temp_map
Beispiel #28
0
def compute_attractiveness(raster,
                           metric,
                           constant,
                           kappa,
                           alpha,
                           mask=None,
                           output_name=None,
                           score=None):
    """
    Compute a raster map whose values follow an (euclidean) distance function
    ( {constant} + {kappa} ) / ( {kappa} + exp({alpha} * {distance}) ), where:

    Source: http://publications.jrc.ec.europa.eu/repository/bitstream/JRC87585/lb-na-26474-en-n.pdf

    Parameters
    ----------
    constant : 1

    kappa :
        A constant named 'K'

    alpha :
        A constant named 'a'

    distance :
        A distance map based on the input raster

    score :
        An optional score term to multiply the distance function.
        (Previously read from an undefined name, which raised NameError;
        it is now an explicit keyword parameter defaulting to None.)

    mask :
        Optional raster MASK which is inverted to selectively exclude non-NULL
        cells from distance related computations.

    output_name :
        Name to pass to temporary_filename() to create a temporary map name

    Returns
    -------
    tmp_output :
        A temporary proximity to features raster map.

    Examples
    --------
    ...

    """
    # components that identify this computation (kept for potential use in
    # temporary file naming, see the commented-out alternative below)
    distance_terms = [
        str(raster),
        str(metric),
        "distance",
        str(constant),
        str(kappa),
        str(alpha),
    ]

    if score:
        grass.debug(_(
            "Score for attractiveness equation: {s}".format(s=score)))
        # append the whole score string; '+= str(score)' would extend the
        # list with the string's individual characters
        distance_terms.append(str(score))

    # tmp_distance = temporary_filename('_'.join(distance_terms))
    tmp_distance = temporary_filename(filename="_".join([raster, metric]))
    r.grow_distance(input=raster,
                    distance=tmp_distance,
                    metric=metric,
                    quiet=True,
                    overwrite=True)

    if mask:
        msg = "Inverted masking to exclude non-NULL cells "
        msg += "from distance related computations based on '{mask}'"
        msg = msg.format(mask=mask)
        grass.verbose(_(msg))
        r.mask(raster=mask, flags="i", overwrite=True, quiet=True)

    # FIXME: use a parameters dictionary, avoid conditionals
    if score:
        distance_function = build_distance_function(
            constant=constant,
            kappa=kappa,
            alpha=alpha,
            variable=tmp_distance,
            score=score,
        )

    # FIXME: use a parameters dictionary, avoid conditionals
    if not score:
        distance_function = build_distance_function(constant=constant,
                                                    kappa=kappa,
                                                    alpha=alpha,
                                                    variable=tmp_distance)

    # temporary maps will be removed
    if output_name:
        tmp_distance_map = temporary_filename(filename=output_name)
    else:
        basename = "_".join([raster, "attractiveness"])
        tmp_distance_map = temporary_filename(filename=basename)

    distance_function = EQUATION.format(result=tmp_distance_map,
                                        expression=distance_function)
    msg = "Distance function: {f}".format(f=distance_function)
    grass.verbose(_(msg))
    grass.mapcalc(distance_function, overwrite=True)

    r.null(map=tmp_distance_map, null=0)  # Set NULLs to 0

    compress_status = grass.read_command("r.compress",
                                         flags="g",
                                         map=tmp_distance_map)
    grass.verbose(_(
        "Compress status: {s}".format(s=compress_status)))  # REMOVEME

    return tmp_distance_map
Beispiel #29
0
def build_distance_function(constant,
                            kappa,
                            alpha,
                            variable,
                            score=None,
                            suitability=None):
    """
    Compose a valid `r.mapcalc` expression of the "space-time" form:

        ( {constant} + {kappa} ) / ( {kappa} + exp({alpha} * {variable}) )

    Parameters
    ----------
    constant :
        1

    kappa :
        A constant named 'K'

    alpha :
        A constant named 'a'

    variable :
        The main input variable: 'distance' for attractiveness maps,
        'population' for flow maps

    score :
        Optional multiplication factor applied to the base equation.

    suitability :
        [ NOTE: currently unused -- reserved for a future integration of
        land suitability scores as an additional multiplication factor. ]

    Returns
    -------
    function :
        A valid `r.mapcalc` expression

    Examples
    --------
    ..
    """
    # build the whole fraction in a single format() pass
    function = (
        " ( {constant} + {kappa} ) / "
        "( {kappa} + exp({alpha} * {variable}) )"
    ).format(constant=constant,
             kappa=kappa,
             alpha=alpha,
             variable=variable)  # variable "formatted" by a caller function
    grass.debug("Function without score: {f}".format(f=function))

    if score:
        # multiply the base equation by the score factor
        function = "{function} * {score}".format(function=function,
                                                 score=score)
    grass.debug(_("Function after adding 'score': {f}".format(f=function)))

    return function
Beispiel #30
0
    def _computeRequestData(self, bbox, tl_corner, tile_span, tile_size, mat_num_bbox):
        """!Initialize data needed for iteration through tiles. Used by WMTS_GRASS and OnEarth_GRASS drivers.

        @param bbox requested bounding box (dict with minx/miny/maxx/maxy)
        @param tl_corner top-left corner of the tile matrix (dict with minx/maxy)
        @param tile_span tile extent in map units (dict with x/y)
        @param tile_size tile extent in pixels (dict with x/y)
        @param mat_num_bbox extent of available tiles in row/col numbers
               (dict with min_col/max_col/min_row/max_row)

        Sets self.t_num_bbox, self.intersects, self.map_region,
        self.i_col/self.i_row (iteration state), self.query_bbox and
        self.tile_ref.
        """
        # tolerance against floating point error when a bbox edge falls
        # exactly on a tile boundary
        epsilon = 1e-15

        # request data bbox specified in row and col number
        self.t_num_bbox = {}

        self.t_num_bbox["min_col"] = int(
            floor((bbox["minx"] - tl_corner["minx"]) / tile_span["x"] + epsilon)
        )
        self.t_num_bbox["max_col"] = int(
            floor((bbox["maxx"] - tl_corner["minx"]) / tile_span["x"] - epsilon)
        )

        # rows are counted downwards from the matrix top-left corner
        self.t_num_bbox["min_row"] = int(
            floor((tl_corner["maxy"] - bbox["maxy"]) / tile_span["y"] + epsilon)
        )
        self.t_num_bbox["max_row"] = int(
            floor((tl_corner["maxy"] - bbox["miny"]) / tile_span["y"] - epsilon)
        )

        # Does required bbox intersects bbox of data available on server?
        # NOTE(review): the lower-bound side of these tests compares against
        # self.t_num_bbox["min_row"]/["min_col"] rather than
        # mat_num_bbox["min_row"]/["min_col"]; that looks suspicious (the
        # lower bound is then trivially satisfied for row/col == min_*) --
        # confirm against the upstream driver before changing.
        self.intersects = False
        for col in ["min_col", "max_col"]:
            for row in ["min_row", "max_row"]:
                if (
                    self.t_num_bbox["min_row"] <= self.t_num_bbox[row]
                    and self.t_num_bbox[row] <= mat_num_bbox["max_row"]
                ) and (
                    self.t_num_bbox["min_col"] <= self.t_num_bbox[col]
                    and self.t_num_bbox[col] <= mat_num_bbox["max_col"]
                ):
                    self.intersects = True

        if not self.intersects:
            grass.warning(_("Region is out of server data extend."))
            self.map_region = None
            return

        # crop request bbox to server data bbox extend
        if self.t_num_bbox["min_col"] < (mat_num_bbox["min_col"]):
            self.t_num_bbox["min_col"] = int(mat_num_bbox["min_col"])

        if self.t_num_bbox["max_col"] > (mat_num_bbox["max_col"]):
            self.t_num_bbox["max_col"] = int(mat_num_bbox["max_col"])

        if self.t_num_bbox["min_row"] < (mat_num_bbox["min_row"]):
            self.t_num_bbox["min_row"] = int(mat_num_bbox["min_row"])

        if self.t_num_bbox["max_row"] > (mat_num_bbox["max_row"]):
            self.t_num_bbox["max_row"] = int(mat_num_bbox["max_row"])

        grass.debug(
            "t_num_bbox: min_col:%d max_col:%d min_row:%d max_row:%d"
            % (
                self.t_num_bbox["min_col"],
                self.t_num_bbox["max_col"],
                self.t_num_bbox["min_row"],
                self.t_num_bbox["max_row"],
            ),
            3,
        )

        num_tiles = (self.t_num_bbox["max_col"] - self.t_num_bbox["min_col"] + 1) * (
            self.t_num_bbox["max_row"] - self.t_num_bbox["min_row"] + 1
        )
        grass.message(
            _("Fetching %d tiles with %d x %d pixel size per tile...")
            % (num_tiles, tile_size["x"], tile_size["y"])
        )

        # georeference of raster, where tiles will be merged
        self.map_region = {}
        self.map_region["minx"] = (
            self.t_num_bbox["min_col"] * tile_span["x"] + tl_corner["minx"]
        )
        self.map_region["maxy"] = (
            tl_corner["maxy"] - (self.t_num_bbox["min_row"]) * tile_span["y"]
        )

        self.map_region["maxx"] = (self.t_num_bbox["max_col"] + 1) * tile_span[
            "x"
        ] + tl_corner["minx"]
        self.map_region["miny"] = (
            tl_corner["maxy"] - (self.t_num_bbox["max_row"] + 1) * tile_span["y"]
        )

        # size of raster, where tiles will be merged
        self.map_region["cols"] = int(
            tile_size["x"]
            * (self.t_num_bbox["max_col"] - self.t_num_bbox["min_col"] + 1)
        )
        self.map_region["rows"] = int(
            tile_size["y"]
            * (self.t_num_bbox["max_row"] - self.t_num_bbox["min_row"] + 1)
        )

        # hold information about current column and row during iteration
        self.i_col = self.t_num_bbox["min_col"]
        self.i_row = self.t_num_bbox["min_row"]

        # bbox for first tile request
        self.query_bbox = {
            "minx": tl_corner["minx"],
            "maxy": tl_corner["maxy"],
            "maxx": tl_corner["minx"] + tile_span["x"],
            "miny": tl_corner["maxy"] - tile_span["y"],
        }

        self.tile_ref = {"sizeX": tile_size["x"], "sizeY": tile_size["y"]}
Beispiel #31
0
def normalize_map(raster, output_name):
    """
    Normalize all raster map cells by subtracting the raster map's minimum and
    dividing by the range.

    Parameters
    ----------
    raster :
        Name of input raster map

    output_name :
        Name of output raster map

    Returns
    -------

    Examples
    --------
    ...
    """
    # refuse to operate on a raster that does not exist in the mapset
    if not grass.find_file(name=raster, element="cell")["file"]:
        grass.fatal("Raster map {name} not found".format(name=raster))

    info = grass.raster_info(raster)
    minimum = info["min"]
    grass.debug(_("Minimum: {m}".format(m=minimum)))

    maximum = info["max"]
    grass.debug(_("Maximum: {m}".format(m=maximum)))

    if minimum is None or maximum is None:
        msg = (
            "Minimum and maximum values of the <{raster}> map are 'None'.\n"
            "=========================================== \n"
            "Possible sources for this erroneous case are: "
            "\n  - the <{raster}> map is empty "
            "\n  - the MASK opacifies all non-NULL cells "
            "\n  - the region is not correctly set\n"
            "=========================================== "
        )
        grass.fatal(_(msg.format(raster=raster)))

    # (cell - min) / (max - min), forced to floating point
    expression = "float(({raster} - {minimum}) / ({maximum} - {minimum}))"
    expression = expression.format(raster=raster,
                                   minimum=minimum,
                                   maximum=maximum)

    equation = EQUATION.format(result=output_name, expression=expression)
    grass.mapcalc(equation, overwrite=True)
    get_univariate_statistics(output_name)
Beispiel #32
0
 def _debug(self, fn, msg):
     """Emit a grass debug message tagged with the originating class.method."""
     location = "%s.%s" % (self.__class__.__name__, fn)
     grass.debug("%s: %s" % (location, msg))
Beispiel #33
0
 def _debug(self, fun, msg):
     """Print debug messages, prefixed with the class and function name."""
     prefix = "%s.%s" % (self.__class__.__name__, fun)
     grass.debug("%s: %s" % (prefix, msg))
def worker(params, nace_queue, output_queue):
    """Consume NACE codes from nace_queue until the 'STOP' sentinel is read.

    For every NACE code a Huff-style model is computed: one distance-decay
    rate map per firm, a probability map per firm, and the consumption
    population captured by each firm.  Per-spatial-unit totals are appended
    to the result files named in 'params', and a [nace, status] pair is put
    on output_queue ('OK' on success, None on failure).

    Parameters
    ----------
    params : dict
        Configuration (map names, column names, gamma values, flags, ...).
    nace_queue : multiprocessing.Queue
        Queue of NACE codes to process; terminated with 'STOP'.
    output_queue : multiprocessing.Queue
        Queue receiving [nace, status] result pairs.
    """
    pid = os.getpid()
    nace = None  # keep the name bound for the outer error handler
    try:
        for nace in iter(nace_queue.get, 'STOP'):

            gscript.info("Working on NACE %s" % nace)

            tempdist0 = 'tempdist0_%d' % pid
            tempdist = 'tempdist_%d' % pid

            # missing gamma -> report and skip this NACE code
            try:
                gamma = params['gammas_nace'][nace]
                gscript.verbose("Gamma of NACE %s = %f" % (nace, gamma))
            except KeyError:
                gscript.message("No gamma value found for NACE %s" % nace)
                output_queue.put([nace, None])
                continue

            gscript.verbose("Calculating individual rate maps for NACE %s" % nace)

            query = "SELECT cat, x, y, %s, %s" % (params['volume_measure'], params['spatial_unit_name'])
            query += " FROM %s" % params['pointmap']
            query += " WHERE %s>0" % params['volume_measure']
            query += " AND %s='%s'" % (params['nace_info_column'], nace)

            database = gscript.vector_db(params['pointmap'])[1]['database']
            firms = gscript.read_command('db.select',
                                        flags = 'c',
                                        sql = query,
                                        database = database)

            if len(firms) == 0:
                continue

            firm_info = {}
            rate_maps = []
            total_pop_spatial_unit = {}
            total_empl_spatial_unit = {}
            total_empl_outside_spatial_unit = {}
            firm_totals_filename = gscript.tempfile()
            x = {}
            y = {}
            for firm in firms.splitlines():
                cat = firm.split('|')[0]
                x[cat] = firm.split('|')[1]
                y[cat] = firm.split('|')[2]
                volume = firm.split('|')[3]
                spatial_unit = firm.split('|')[4]
                firm_info[cat] = spatial_unit

                # euclidean distance from the firm location
                mapcalc_expression = "%s = sqrt( (x()-%s)^2 + (y()-%s)^2 )" % (
                                    tempdist0, x[cat], y[cat])

                gscript.run_command('r.mapcalc',
                                   expression=mapcalc_expression,
                                   overwrite=True,
                                   quiet=True)

                # avoid a zero distance at the firm's own cell
                mapcalc_expression = "%s = if(%s == 0, %f, %s)" % (tempdist,
                        tempdist0, float(params['resolution'])/2, tempdist0)

                gscript.run_command('r.mapcalc',
                                   expression=mapcalc_expression,
                                   overwrite=True,
                                   quiet=True)

                firm_rate_map = "firm_rate_%d_%s" % (pid, cat)
                rate_maps.append(firm_rate_map)
                mapcalc_expression = "%s = float(%s) / exp(%s, %F)" % (firm_rate_map, volume, tempdist, gamma)

                gscript.run_command('r.mapcalc',
                                   expression=mapcalc_expression,
                                   overwrite=True,
                                   quiet=True)

            gscript.debug("Calculating sum of rates for NACE %s" % nace)

            sum_rates = 'sum_rates_%d' % pid

            fname = gscript.tempfile()
            f_rate_maps = open(fname, 'w')
            for rate_map in rate_maps:
                print(rate_map, file=f_rate_maps)
            f_rate_maps.close()


            gscript.run_command('r.series',
                               file_=fname,
                               output=sum_rates,
                               method='sum',
                               overwrite=True,
                               quiet=True)

            gscript.try_remove(fname)

            gscript.verbose("Calculating probabilities and population concerned for NACE %s" % nace)

            tempprob = 'temp_prob_%d' % pid
            for firm_rate_map in rate_maps:

                mapcalc_expression = "%s = float(%s) / float(%s)" % (tempprob, firm_rate_map, sum_rates)

                gscript.run_command('r.mapcalc',
                                   expression = mapcalc_expression,
                                   overwrite = True,
                                   quiet=True)

                if params['remove_tmps']:
                    gscript.run_command('g.remove',
                                       type_ = 'raster',
                                       name = firm_rate_map,
                                       flags = 'f',
                                       quiet = True)

                cat = firm_rate_map.split('_')[-1]
                firm_pop_map = 'firm_pop_%d' % pid

                nace2 = nace[:2]
                if params['inc_tb']:
                    trade_balance = params['trade_balance_over_output_by_nace'][nace2]
                    mapcalc_expression = "%s = %s * (%s * (1 - %f))" % (firm_pop_map, params['consumption_population_map'], tempprob, trade_balance)
                else:
                    mapcalc_expression = "%s = %s * %s" % (firm_pop_map, tempprob, params['population_map'])

                gscript.run_command('r.mapcalc',
                                   expression = mapcalc_expression,
                                   overwrite = True,
                                   quiet=True)

                pop_map_stats = gscript.parse_command('r.univar',
                                                     flags = "g",
                                                     map = firm_pop_map)

                spatial_unit = firm_info[cat]
                sum_pop = float(pop_map_stats['sum'])
                if spatial_unit in total_pop_spatial_unit:
                    total_pop_spatial_unit[spatial_unit] += sum_pop
                else:
                    total_pop_spatial_unit[spatial_unit] = sum_pop

                if params['calc_consumption_pop_indicator']:
                    # Calculate the total employment for exports
                    # total employment x share of consumption population that is
                    # outside the spatial unit the firm is in
                    tempraster = 'huff_tempraster_%s_%s' % (spatial_unit, pid)
                    gscript.run_command('v.to.rast',
                                        input_=params['spatial_unit_map'],
                                        where="%s <> '%s' " % (params['spatial_unit_name_map'], spatial_unit),
                                        output=tempraster,
                                        use='val',
                                        overwrite=True,
                                        quiet=True)
                    pop_map_stats = gscript.parse_command('r.univar',
                                                         flags="g",
                                                         map=firm_pop_map,
                                                         zones=tempraster)

                    sum_pop_outside = float(pop_map_stats['sum'])
                    prop_outside = sum_pop_outside / sum_pop
                    total_empl = int(gscript.read_command('v.db.select',
                                                      map_=params['pointmap'],
                                                      column=params['volume_measure'],
                                                      where='cat = %d' % int(cat),
                                                      flags='c',
                                                      quiet=True))
                    if spatial_unit in total_empl_spatial_unit:
                        total_empl_spatial_unit[spatial_unit] += total_empl
                    else:
                        total_empl_spatial_unit[spatial_unit] = total_empl

                    exp_empl = int(total_empl) * prop_outside
                    if spatial_unit in total_empl_outside_spatial_unit:
                        total_empl_outside_spatial_unit[spatial_unit] += exp_empl
                    else:
                        total_empl_outside_spatial_unit[spatial_unit] = exp_empl

                    gscript.run_command('g.remove',
                                        type='rast',
                                        name=tempraster,
                                        flags='f',
                                        quiet=True)

                if(params['calculate_total_consumption_potentials']):
                    firm_total = "%s|%s|%d\n" % (x[cat], y[cat], sum_pop)
                    with open(firm_totals_filename, 'a') as f_firm_totals:
                        f_firm_totals.write(firm_total)

            if params['remove_tmps']:
                gscript.run_command('g.remove',
                                   type_ = 'raster',
                                   name = tempprob,
                                   flags = 'f',
                                   quiet = True)

                gscript.run_command('g.remove',
                                   type_ = 'raster',
                                   name = firm_pop_map,
                                   flags = 'f',
                                   quiet = True)

                gscript.run_command('g.remove',
                                   type_ = 'raster',
                                   name = sum_rates,
                                   flags = 'f',
                                   quiet = True)

                gscript.run_command('g.remove',
                                   type_ = 'raster',
                                   name = tempdist,
                                   flags = 'f',
                                   quiet = True)

                gscript.run_command('g.remove',
                                   type_ = 'raster',
                                   name = tempdist0,
                                   flags = 'f',
                                   quiet = True)


            gscript.verbose('Writing results to files')
            # NOTE: the 'with' line below was tab-indented (TabError on
            # Python 3) and used dict.iteritems() (removed in Python 3)
            with open(params['pop_by_spatial_unit_and_nace_file'], 'a') as f:
                for spatial_unit, pop in total_pop_spatial_unit.items():
                    output_string = nace + ';' + spatial_unit + ';' + str(pop) + '\n'
                    f.write(output_string)

            if params['calc_consumption_pop_indicator']:
                with open(params['empl_by_spatial_unit_and_nace_file'], 'a') as f:
                    for spatial_unit, pop in total_empl_outside_spatial_unit.items():
                        output_string = nace + ';'
                        output_string += spatial_unit + ';'
                        output_string += str(total_empl_spatial_unit[spatial_unit]) + ';'
                        output_string += str(pop) + '\n'
                        f.write(output_string)

            output_queue.put([nace, 'OK'])

            if(params['calculate_total_consumption_potentials']):
                tot_cons_potential_map = 'total_consumption_potential_%s_%s' % (params['pointmap'], nace)
                gscript.run_command('r.in.xyz',
                                   input_ = firm_totals_filename,
                                   method = 'sum',
                                   output = tot_cons_potential_map,
                                   overwrite = True,
                                   quiet = True)
                gscript.try_remove(firm_totals_filename)

            gscript.info("Finished with NACE %s" % nace)


    except Exception:
        # best-effort: report failure of the current NACE to the parent;
        # a bare 'except:' would also swallow KeyboardInterrupt/SystemExit
        output_queue.put([nace, None])


    return True
def remove_map_at_exit(map_name):
    """Register *map_name* for removal once the program terminates."""
    grass.debug(
        _("*** Add '{map}' to list of maps to remove when program exits".format(
            map=map_name)))
    atexit.register(lambda: remove_map(map_name))
Beispiel #36
0
def get_session(options):
    """Based on a dictionary and available backends create a remote session.

    Tries the backend requested in ``options['backend']``, or otherwise each
    available backend in order of preference, and returns the first
    connection object that could be created.  Ends with ``gscript.fatal()``
    when no backend is usable.

    :param options: parsed module options; keys used: 'backend', 'port',
        'password', 'config', 'user', 'server'
    :returns: an open connection object of the selected backend
    """
    requested_backend = options['backend']
    if requested_backend:
        backends = [requested_backend]
    else:
        # on win there is minimal chance of ssh but try anyway
        # pexpect only upon request, it is specific and insufficiently tested
        backends = ['paramiko', 'simple']
    session = None
    # normalize empty option strings to None and make the port an int
    ensure_nones(options, ['port', 'password'])
    to_ints(options, ['port'])

    # TODO: provide a flag (or default) for reading the file or params
    # from some standardized location or variable (so we have shorter
    # command lines)
    config_name = options['config']
    if config_name:
        gscript.debug("Config file supplied for login")
        check_config_file(config_name)
        with open(config_name, 'r') as config_file:
            config = config_file.read()
            # split using whitespace
            # (supposing no spaces in user name and password)
            values = config.split()
            if len(values) == 2:
                gscript.verbose(_("Using values for login from config file"))
                options['user'] = values[0]
                options['password'] = values[1]
            else:
                # translate first, then substitute, so the message matches
                # the gettext catalog key
                gscript.fatal(_("The config file <%s> is not well-formed."
                                " It should contain user name and password"
                                " separated by whitespace"
                                " (newlines, spaces or tabs)") % config_name)

    # get access to wrappers
    from grass.pygrass.utils import set_path
    set_path('g.remote')

    for backend in backends:
        if backend == 'paramiko':
            try:
                from friendlyssh import Connection
                session = Connection(
                    username=options['user'], host=options['server'],
                    password=options['password'], port=options['port'])
                gscript.verbose(_("Using Paramiko backend"))
                break
            except ImportError as error:
                gscript.verbose(_("Tried Paramiko backend but"
                                  " it is not available (%s)") % error)
                continue
        elif backend == 'simple':
            try:
                from simplessh import SshConnection as Connection
                # TODO: support password and port (or warn they are missing)
                session = Connection(
                    user=options['user'], host=options['server'])
                gscript.verbose(_("Using simple (ssh and scp) backend"))
                break
            except ImportError as error:
                gscript.verbose(_("Tried simple (ssh and scp) backend but"
                                  " it is not available (%s)") % error)
                continue
        elif backend == 'pexpect':
            try:
                from pexpectssh import SshSession as Connection
                # TODO: support port (or warn it's missing)
                session = Connection(
                    user=options['user'], host=options['server'],
                    logfile='gcloudsshiface.log', verbose=1,
                    password=options['password'])
                gscript.verbose(_("Using Pexpect (with ssh and scp) backend"))
                break
            except ImportError as error:
                gscript.verbose(_("Tried Pexpect (ssh, scp and pexpect)"
                                  " backend but it is not available"
                                  " (%s)") % error)
                continue
        elif backend == 'local':
            try:
                from localsession import LocalConnection as Connection
                session = Connection()
                gscript.verbose(_("Using local host backend"))
                break
            except ImportError as error:
                gscript.verbose(_("Tried local host"
                                  " backend but it is not available"
                                  " (%s)") % error)
                continue
    if session is None:
        hint = _("Please install Paramiko Python package"
                 " or ssh and scp tools.")
        verbose_message = _("Use --verbose flag to get more information.")
        if sys.platform.startswith('win'):
            platform_hint = _("Note that the ssh is generally not available"
                              " for MS Windows. Paramiko should be accessible"
                              " through python pip but you have to make it"
                              " available to GRASS GIS (or OSGeo4W) Python.")
        else:
            platform_hint = _("All should be in the software repositories."
                              " If Paramiko is not in the repository use pip.")
        gscript.fatal(_(
            "No backend available. {general_hint} {platform_hint}"
            " {verbose}").format(
                general_hint=hint, platform_hint=platform_hint,
                verbose=verbose_message))
    return session
Beispiel #37
0
    def _computeRequestData(self, bbox, tl_corner, tile_span, tile_size, mat_num_bbox):
        """!Initialize data needed for iteration through tiles. Used by WMTS_GRASS and OnEarth_GRASS drivers.

        @param bbox requested region {'minx','miny','maxx','maxy'} in map units
        @param tl_corner top left corner of the tile matrix in map units
        @param tile_span extent of one tile {'x','y'} in map units
        @param tile_size size of one tile {'x','y'} in pixels
        @param mat_num_bbox extent of tiles available on the server, in
               row/col numbers {'min_row','max_row','min_col','max_col'}
        """
        # guard against float rounding exactly on tile boundaries
        epsilon = 1e-15

        # request data bbox specified in row and col number
        self.t_num_bbox = {}

        self.t_num_bbox['min_col'] = int(
            floor((bbox['minx'] - tl_corner['minx']) / tile_span['x'] + epsilon))
        self.t_num_bbox['max_col'] = int(
            floor((bbox['maxx'] - tl_corner['minx']) / tile_span['x'] - epsilon))

        self.t_num_bbox['min_row'] = int(
            floor((tl_corner['maxy'] - bbox['maxy']) / tile_span['y'] + epsilon))
        self.t_num_bbox['max_row'] = int(
            floor((tl_corner['maxy'] - bbox['miny']) / tile_span['y'] - epsilon))

        # Does required bbox intersect bbox of data available on server?
        # Each corner tile number must lie within the server extent
        # (mat_num_bbox); comparing the lower bound against self.t_num_bbox
        # itself made that half of the test trivially true.
        self.intersects = False
        for col in ['min_col', 'max_col']:
            for row in ['min_row', 'max_row']:
                if (mat_num_bbox['min_row'] <= self.t_num_bbox[row] and self.t_num_bbox[row] <= mat_num_bbox['max_row']) and (
                        mat_num_bbox['min_col'] <= self.t_num_bbox[col] and self.t_num_bbox[col] <= mat_num_bbox['max_col']):
                    self.intersects = True

        if not self.intersects:
            grass.warning(_('Region is out of server data extent.'))
            self.map_region = None
            return

        # crop request bbox to server data bbox extent
        if self.t_num_bbox['min_col'] < (mat_num_bbox['min_col']):
            self.t_num_bbox['min_col'] = int(mat_num_bbox['min_col'])

        if self.t_num_bbox['max_col'] > (mat_num_bbox['max_col']):
            self.t_num_bbox['max_col'] = int(mat_num_bbox['max_col'])

        if self.t_num_bbox['min_row'] < (mat_num_bbox['min_row']):
            self.t_num_bbox['min_row'] = int(mat_num_bbox['min_row'])

        if self.t_num_bbox['max_row'] > (mat_num_bbox['max_row']):
            self.t_num_bbox['max_row'] = int(mat_num_bbox['max_row'])

        grass.debug(
            't_num_bbox: min_col:%d max_col:%d min_row:%d max_row:%d' %
            (self.t_num_bbox['min_col'],
             self.t_num_bbox['max_col'],
             self.t_num_bbox['min_row'],
             self.t_num_bbox['max_row']),
            3)

        num_tiles = (self.t_num_bbox['max_col'] - self.t_num_bbox['min_col'] + 1) * (
            self.t_num_bbox['max_row'] - self.t_num_bbox['min_row'] + 1)
        grass.message(
            _('Fetching %d tiles with %d x %d pixel size per tile...') %
            (num_tiles, tile_size['x'], tile_size['y']))

        # georeference of raster, where tiles will be merged
        self.map_region = {}
        self.map_region['minx'] = self.t_num_bbox['min_col'] * tile_span['x'] + tl_corner['minx']
        self.map_region['maxy'] = tl_corner['maxy'] - (self.t_num_bbox['min_row']) * tile_span['y']

        self.map_region['maxx'] = (
            self.t_num_bbox['max_col'] + 1) * tile_span['x'] + tl_corner['minx']
        self.map_region['miny'] = tl_corner[
            'maxy'] - (self.t_num_bbox['max_row'] + 1) * tile_span['y']

        # size of raster, where tiles will be merged
        self.map_region['cols'] = int(tile_size['x'] *
                                      (self.t_num_bbox['max_col'] - self.t_num_bbox['min_col'] + 1))
        self.map_region['rows'] = int(tile_size['y'] *
                                      (self.t_num_bbox['max_row'] - self.t_num_bbox['min_row'] + 1))

        # hold information about current column and row during iteration
        self.i_col = self.t_num_bbox['min_col']
        self.i_row = self.t_num_bbox['min_row']

        # bbox for first tile request
        self.query_bbox = {
            'minx': tl_corner['minx'],
            'maxy': tl_corner['maxy'],
            'maxx': tl_corner['minx'] + tile_span['x'],
            'miny': tl_corner['maxy'] - tile_span['y'],
        }

        self.tile_ref = {
            'sizeX': tile_size['x'],
            'sizeY': tile_size['y']
        }
def remove_files_at_exit(filename):
    """Register *filename* for deletion once the program terminates."""
    grass.debug(
        _("*** Add '{file}' to list of files to remove when program exits".format(
            file=filename)))
    atexit.register(lambda: os.unlink(filename))
Beispiel #39
0
def compute_supply(
    base,
    recreation_spectrum,
    highest_spectrum,
    base_reclassification_rules,
    reclassified_base,
    reclassified_base_title,
    flow,
    aggregation,
    ns_resolution,
    ew_resolution,
    print_only=False,
    flow_column_name=None,
    vector=None,
    supply_filename=None,
    use_filename=None,
):
    """
     Algorithmic description of the "Contribution of Ecosystem Types"

     # FIXME
     '''
     1   B ← {0, .., m-1}     :  Set of aggregational boundaries
     2   T ← {0, .., n-1}     :  Set of land cover types
     3   WE ← 0               :  Set of weighted extents
     4   R ← 0                :  Set of fractions
     5   F ← 0
     6   MASK ← HQR           : High Quality Recreation
     7   foreach {b} ⊆ B do   : for each aggregational boundary 'b'
     8      RB ← 0
     9      foreach {t} ⊆ T do  : for each Land Type
     10         WEt ← Et * Wt   : Weighted Extent = Extent(t) * Weight(t)
     11         WE ← WE⋃{WEt}   : Add to set of Weighted Extents
     12     S ← ∑t∈WEt
     13     foreach t ← T do
     14        Rt ← WEt / ∑WE
     15        R ← R⋃{Rt}
     16     RB ← RB⋃{R}
     '''
     # FIXME

    Parameters
    ----------
    recreation_spectrum:
        Map scoring access to and quality of recreation

    highest_spectrum :
        Expected is a map of areas with highest recreational value (category 9
        as per the report ... )

    base :
        Base land types map for final zonal statistics. Specifically to
        ESTIMAP's recreation mapping algorithm

    base_reclassification_rules :
        Reclassification rules for the input base map

    reclassified_base :
        Name for the reclassified base cover map

    reclassified_base_title :
        Title for the reclassified base map

    ecosystem_types :

    flow :
        Map of visits, derived from the mobility function, depicting the
        number of people living inside zones 0, 1, 2, 3. Used as a cover map
        for zonal statistics.

    aggregation :

    ns_resolution :

    ew_resolution :

    statistics_filename :

    supply_filename :
        Name for CSV output file of the supply table

    use_filename :
        Name for CSV output file of the use table

    flow_column_name :
        Name for column to populate with 'flow' values

    vector :
        If 'vector' is given, a vector map of the 'flow' along with appropriate
        attributes will be produced.

    ? :
        Land cover class percentages in ROS9 (this is: relative percentage)

    output :
        Supply table (distribution of flow for each land cover class)

    Returns
    -------
    This function produces a map to base the production of a supply table in
    form of CSV.

    Examples
    --------
    """
    # Inputs
    flow_in_base = flow + "_" + base
    base_scores = base + ".scores"

    # Define lists and dictionaries to hold intermediate data
    statistics_dictionary = {}
    weighted_extents = {}
    flows = []

    # MASK areas of high quality recreation
    r.mask(raster=highest_spectrum, overwrite=True, quiet=True)

    # Reclassify land cover map to MAES ecosystem types
    r.reclass(
        input=base,
        rules=base_reclassification_rules,
        output=reclassified_base,
        quiet=True,
    )
    # add 'reclassified_base' to "remove_at_exit" after the reclassified maps!

    # Discard areas out of MASK
    temporary_reclassified_base = reclassified_base + "_temporary"
    copy_equation = EQUATION.format(result=temporary_reclassified_base,
                                    expression=reclassified_base)
    r.mapcalc(copy_equation, overwrite=True)
    g.rename(
        raster=(temporary_reclassified_base, reclassified_base),
        overwrite=True,
        quiet=True,
    )

    # Count flow within each land cover category
    r.stats_zonal(
        base=base,
        flags="r",
        cover=flow,
        method="sum",
        output=flow_in_base,
        overwrite=True,
        quiet=True,
    )
    remove_map_at_exit(flow_in_base)

    # Set colors for "flow" map
    r.colors(map=flow_in_base, color=MOBILITY_COLORS, quiet=True)

    # Parse aggregation raster categories and labels
    categories = grass.parse_command("r.category",
                                     map=aggregation,
                                     delimiter="\t")

    for category in categories:

        msg = "\n>>> Processing category '{c}' of aggregation map '{a}'"
        grass.verbose(_(msg.format(c=category, a=aggregation)))

        # Intermediate names

        cells = highest_spectrum + ".cells" + "." + category
        remove_map_at_exit(cells)

        extent = highest_spectrum + ".extent" + "." + category
        remove_map_at_exit(extent)

        weighted = highest_spectrum + ".weighted" + "." + category
        remove_map_at_exit(weighted)

        fractions = base + ".fractions" + "." + category
        remove_map_at_exit(fractions)

        flow_category = "_flow_" + category
        flow = base + flow_category
        remove_map_at_exit(flow)

        flow_in_reclassified_base = reclassified_base + "_flow"
        flow_in_category = reclassified_base + flow_category
        flows.append(flow_in_category)  # add to list for patching
        remove_map_at_exit(flow_in_category)

        # Output names

        msg = "*** Processing aggregation raster category: {r}"
        msg = msg.format(r=category)
        grass.debug(_(msg))
        # g.message(_(msg))

        # First, set region to extent of the aggregation map
        # and resolution to the one of the population map
        # Note the `-a` flag to g.region: ?
        # To safely modify the region: grass.use_temp_region()  # FIXME
        g.region(
            raster=aggregation,
            nsres=ns_resolution,
            ewres=ew_resolution,
            flags="a",
            quiet=True,
        )

        msg = "!!! Computational resolution matched to {raster}"
        msg = msg.format(raster=aggregation)
        grass.debug(_(msg))

        # Build MASK for current category & high quality recreation areas
        msg = " * Setting category '{c}' as a MASK"
        grass.verbose(_(msg.format(c=category, a=aggregation)))

        masking = "if( {spectrum} == {highest_quality_category} && "
        masking += "{aggregation} == {category}, "
        masking += "1, null() )"
        masking = masking.format(
            spectrum=recreation_spectrum,
            highest_quality_category=HIGHEST_RECREATION_CATEGORY,
            aggregation=aggregation,
            category=category,
        )
        masking_equation = EQUATION.format(result="MASK", expression=masking)
        grass.mapcalc(masking_equation, overwrite=True)

        # zoom to MASK
        g.region(zoom="MASK",
                 nsres=ns_resolution,
                 ewres=ew_resolution,
                 quiet=True)

        # Count number of cells within each land category
        r.stats_zonal(
            flags="r",
            base=base,
            cover=highest_spectrum,
            method="count",
            output=cells,
            overwrite=True,
            quiet=True,
        )
        cells_categories = grass.parse_command("r.category",
                                               map=cells,
                                               delimiter="\t")
        grass.debug(_("*** Cells: {c}".format(c=cells_categories)))

        # Build cell category and label rules for `r.category`
        cells_rules = "\n".join([
            "{0}:{1}".format(key, value)
            for key, value in cells_categories.items()
        ])

        # Discard areas out of MASK
        temporary_cells = cells + "_temporary"
        copy_equation = EQUATION.format(result=temporary_cells,
                                        expression=cells)
        r.mapcalc(copy_equation, overwrite=True)
        g.rename(
            raster=(temporary_cells, cells),
            overwrite=True,
            quiet=True,
        )

        # Reassign cell category labels
        r.category(
            map=cells,
            rules="-",
            stdin=cells_rules,
            separator=":",
        )

        # Compute extent of each land category
        extent_expression = "@{cells} * area()"
        extent_expression = extent_expression.format(cells=cells)
        extent_equation = EQUATION.format(result=extent,
                                          expression=extent_expression)
        r.mapcalc(extent_equation, overwrite=True)

        # Write extent figures as labels
        extent_figures_as_labels = extent + "_labeled"
        r.stats_zonal(
            flags="r",
            base=base,
            cover=extent,
            method="average",
            output=extent_figures_as_labels,
            overwrite=True,
            verbose=False,
            quiet=True,
        )
        g.rename(
            raster=(extent_figures_as_labels, extent),
            overwrite=True,
            quiet=True,
        )

        # Write land suitability scores as an ASCII file
        temporary_reclassified_base_map = temporary_filename(
            filename=reclassified_base)
        suitability_scores_as_labels = string_to_file(
            SUITABILITY_SCORES_LABELS,
            filename=temporary_reclassified_base_map)
        remove_files_at_exit(suitability_scores_as_labels)

        # Write scores as raster category labels
        r.reclass(
            input=base,
            output=base_scores,
            rules=suitability_scores_as_labels,
            overwrite=True,
            quiet=True,
            verbose=False,
        )
        remove_map_at_exit(base_scores)

        # Compute weighted extents
        weighted_expression = "@{extent} * float(@{scores})"
        weighted_expression = weighted_expression.format(extent=extent,
                                                         scores=base_scores)
        weighted_equation = EQUATION.format(result=weighted,
                                            expression=weighted_expression)
        r.mapcalc(weighted_equation, overwrite=True)

        # Write weighted extent figures as labels
        weighted_figures_as_labels = weighted + "_figures_as_labels"
        r.stats_zonal(
            flags="r",
            base=base,
            cover=weighted,
            method="average",
            output=weighted_figures_as_labels,
            overwrite=True,
            verbose=False,
            quiet=True,
        )
        g.rename(raster=(weighted_figures_as_labels, weighted),
                 overwrite=True,
                 quiet=True)

        # Get weighted extents in a dictionary
        weighted_extents = grass.parse_command("r.category",
                                               map=weighted,
                                               delimiter="\t")

        # Compute the sum of all weighted extents and add to dictionary
        category_sum = sum([
            float(x) if not math.isnan(float(x)) else 0
            for x in weighted_extents.values()
        ])
        weighted_extents["sum"] = category_sum

        # Create a map to hold fractions of each weighted extent to the sum
        # See also:
        # https://grasswiki.osgeo.org/wiki/LANDSAT#Hint:_Minimal_disk_space_copies
        r.reclass(
            input=base,
            output=fractions,
            rules="-",
            stdin="*=*",
            verbose=False,
            quiet=True,
        )

        # Compute weighted fractions of land types
        # Compare by value, not identity: `key is not "sum"` relied on
        # string interning and could let the "sum" entry leak into the
        # fractions (and warns on Python >= 3.8)
        fraction_category_label = {
            key: float(value) / weighted_extents["sum"]
            for (key, value) in weighted_extents.items() if key != "sum"
        }

        # Build fraction category and label rules for `r.category`
        fraction_rules = "\n".join([
            "{0}:{1}".format(key, value)
            for key, value in fraction_category_label.items()
        ])

        # Set rules
        r.category(map=fractions,
                   rules="-",
                   stdin=fraction_rules,
                   separator=":")

        # Assert that sum of fractions is ~1
        fraction_categories = grass.parse_command("r.category",
                                                  map=fractions,
                                                  delimiter="\t")

        fractions_sum = sum([
            float(x) if not math.isnan(float(x)) else 0
            for x in fraction_categories.values()
        ])
        msg = "*** Fractions: {f}".format(f=fraction_categories)
        grass.debug(_(msg))

        # g.message(_("Sum: {:.17g}".format(fractions_sum)))
        assert abs(fractions_sum - 1) < 1.0e-6, "Sum of fractions is != 1"

        # Compute flow
        flow_expression = "@{fractions} * @{flow}"
        flow_expression = flow_expression.format(fractions=fractions,
                                                 flow=flow_in_base)
        flow_equation = EQUATION.format(result=flow,
                                        expression=flow_expression)
        r.mapcalc(flow_equation, overwrite=True)

        # Write flow figures as raster category labels
        r.stats_zonal(
            base=reclassified_base,
            flags="r",
            cover=flow,
            method="sum",
            output=flow_in_category,
            overwrite=True,
            verbose=False,
            quiet=True,
        )

        # Parse flow categories and labels
        flow_categories = grass.parse_command(
            "r.category",
            map=flow_in_category,
            delimiter="\t",
            quiet=True,
        )
        grass.debug(_("*** Flow: {c}".format(c=flow_categories)))

        # Build flow category and label rules for `r.category`
        flow_rules = "\n".join([
            "{0}:{1}".format(key, value)
            for key, value in flow_categories.items()
        ])

        # Discard areas out of MASK

        # Check here again!
        # Output patch of all flow maps?

        temporary_flow_in_category = flow_in_category + "_temporary"
        copy_equation = EQUATION.format(result=temporary_flow_in_category,
                                        expression=flow_in_category)
        r.mapcalc(copy_equation, overwrite=True)
        g.rename(
            raster=(temporary_flow_in_category, flow_in_category),
            overwrite=True,
            quiet=True,
        )

        # Reassign cell category labels
        r.category(
            map=flow_in_category,
            rules="-",
            stdin=flow_rules,
            separator=":",
            quiet=True,
        )

        # Update title
        # NOTE(review): this appends every processed category so far to the
        # title (category N's title carries categories 1..N) — confirm this
        # accumulation is intended rather than a fresh title per category
        reclassified_base_title += " " + category
        r.support(flow_in_category, title=reclassified_base_title)

        # debugging
        # r.report(
        #     flags='hn',
        #     map=(flow_in_category),
        #     units=('k','c','p'),
        # )

        if print_only:

            grass.verbose(" * Flow in category {c}:".format(c=category))
            r.stats(
                input=(flow_in_category),
                output="-",
                flags="nacpl",
                separator=COMMA,
                quiet=True,
            )

        if not print_only:

            if flow_column_name:
                flow_column_prefix = flow_column_name + '_' + category
            else:
                flow_column_name = "flow"
                flow_column_prefix = flow_column_name + '_' + category

            # Produce vector map(s)
            if vector:

                update_vector(
                    vector=vector,
                    raster=flow_in_category,
                    methods=METHODS,
                    column_prefix=flow_column_prefix,
                )

                # update columns of an user-fed vector map
                # from the columns of vectorised flow-in-category raster map
                raster_to_vector(
                    raster_category_flow=flow_in_category,
                    vector_category_flow=flow_in_category,
                    flow_column_name=flow_column_name,
                    category=category,
                    type="area",
                )

            # get statistics
            dictionary = get_raster_statistics(
                map_one=aggregation,  # reclassified_base
                map_two=flow_in_category,
                separator="|",
                flags="nlcap",
            )

            # merge 'dictionary' with global 'statistics_dictionary'
            statistics_dictionary = merge_two_dictionaries(
                statistics_dictionary, dictionary)

        # It is important to remove the MASK!
        r.mask(flags="r", quiet=True)

    # Add the "reclassified_base" map to "remove_at_exit" here,
    # so as to be after all reclassified maps that derive from it
    remove_map_at_exit(reclassified_base)

    if not print_only:
        g.region(
            raster=aggregation,
            nsres=ns_resolution,
            ewres=ew_resolution,
            flags="a",
            quiet=True,
        )
        r.patch(
            flags="",
            input=flows,
            output=flow_in_reclassified_base,
            quiet=True,
        )
        remove_map_at_exit(flow_in_reclassified_base)

        if vector:
            # Patch all flow vector maps in one
            v.patch(
                flags="e",
                input=flows,
                output=flow_in_reclassified_base,
                overwrite=True,
                quiet=True,
            )

        # export to csv
        if supply_filename:
            nested_dictionary_to_csv(supply_filename, statistics_dictionary)

        if use_filename:
            uses = compile_use_table(statistics_dictionary)
            dictionary_to_csv(use_filename, uses)

    # Maybe return list of flow maps?  Requires unique flow map names
    return flows
Beispiel #40
0
def compute_occurrence(occurrence_maps, input_strds, input_maps, start, base,
                       count, tsuffix, mapset, where, reverse, range,
                       minimum_strds, maximum_strds, dbif):
    """Create one occurrence raster per input map and collect it in
    *occurrence_maps*, returning the updated running *count*.

    For every input map the expression
    ``out = if(map > min && map < max, days_since_start, null())``
    is evaluated, where min/max come from the *range* option or, when
    *minimum_strds*/*maximum_strds* are given, from temporally related
    maps of those space-time datasets.
    """
    # Build the temporal topology between input maps and the optional
    # minimum/maximum datasets so related maps can be looked up below.
    if minimum_strds:
        input_maps_minimum = input_strds.get_registered_maps_as_objects(
            where=where, dbif=dbif)
        minimum_maps = minimum_strds.get_registered_maps_as_objects(dbif=dbif)
        minimum_topo = tgis.SpatioTemporalTopologyBuilder()
        minimum_topo.build(input_maps_minimum, minimum_maps)

    if maximum_strds:
        input_maps_maximum = input_strds.get_registered_maps_as_objects(
            where=where, dbif=dbif)
        maximum_maps = maximum_strds.get_registered_maps_as_objects(dbif=dbif)
        maximum_topo = tgis.SpatioTemporalTopologyBuilder()
        maximum_topo.build(input_maps_maximum, maximum_maps)

    # Aggregate
    num_maps = len(input_maps)
    for idx in xrange(num_maps):
        # walk the list backwards when requested
        current_map = input_maps[num_maps - idx - 1] if reverse \
            else input_maps[idx]

        # Compute the days since start
        input_start, input_end = current_map.get_temporal_extent_as_tuple()

        delta = input_start - start
        if current_map.is_time_absolute():
            days = tgis.time_delta_to_relative_time(delta)
        else:
            days = delta

        # Pick the output map name suffix (granularity, time stamp or number)
        temporal_type = input_strds.get_temporal_type()
        if temporal_type == 'absolute' and tsuffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                current_map.temporal_extent.get_start_time(),
                input_strds.get_granularity())
            occurrence_map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif temporal_type == 'absolute' and tsuffix == 'time':
            suffix = tgis.create_time_suffix(current_map)
            occurrence_map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            occurrence_map_name = tgis.create_numeric_suffix(base, count,
                                                             tsuffix)

        occurrence_map_id = current_map.build_id(occurrence_map_name, mapset)
        occurrence_map = input_strds.get_new_map_instance(occurrence_map_id)

        # Check if new map is in the temporal database
        if occurrence_map.is_in_db(dbif):
            if grass.overwrite():
                # Remove the existing temporal database entry
                occurrence_map.delete(dbif)
                occurrence_map = input_strds.get_new_map_instance(
                    occurrence_map_id)
            else:
                grass.fatal(_("Map <%s> is already registered in the temporal"
                             " database, use overwrite flag to overwrite.") %
                            (occurrence_map.get_map_id()))

        # Defaults for the valid range come from the range option
        lower, upper = range.split(",")[0:2]

        # Override the bounds with temporally related minimum/maximum maps
        if minimum_strds:
            relations = input_maps_minimum[idx].get_temporal_relations()
            for relation in range_relations:
                if relation in relations:
                    lower = str(relations[relation][0].get_id())
                    break

        if maximum_strds:
            relations = input_maps_maximum[idx].get_temporal_relations()
            for relation in range_relations:
                if relation in relations:
                    upper = str(relations[relation][0].get_id())
                    break

        expression = "%s = if(%s > %s && %s < %s, %s, null())"%(occurrence_map_name,
                                                                current_map.get_name(),
                                                                lower, current_map.get_name(),
                                                                upper, days)
        grass.debug(expression)
        grass.mapcalc(expression, overwrite=True)

        map_start, map_end = current_map.get_temporal_extent_as_tuple()

        if current_map.is_time_absolute():
            occurrence_map.set_absolute_time(map_start, map_end)
        else:
            occurrence_map.set_relative_time(map_start, map_end,
                                             current_map.get_relative_time_unit())

        # Store the new maps
        occurrence_maps[current_map.get_id()] = occurrence_map

        count += 1

    return count
Beispiel #41
0
def main():
    """Run a GRASS job on a remote cluster via ssh and qsub (g.cloud).

    Packs the current location's PERMANENT metadata and any requested
    raster/vector maps, copies them together with the user's GRASS and
    qsub scripts to the server, launches one qsub job per variable
    combination (or a single job), and finally submits a collector job
    that gathers results and optionally mails the user.

    Reads module parameters from the global ``options``/``flags`` dicts;
    returns 0 on the early-exit error paths (``grass.fatal`` normally
    terminates before the return is reached).
    """
    # set the home path
    home = os.path.expanduser('~')
    # check if user is in GRASS
    gisbase = os.getenv('GISBASE')
    if not gisbase:
        grass.fatal(_('$GISBASE not defined'))
        return 0
    # check ssh
    if not grass.find_program('ssh', '-V'):
        grass.fatal(
            _("%s required. Please install '%s' first.") % ('ssh', 'ssh'))
        return 0
    # parse the grassdata, location e mapset
    variables = grass.core.gisenv()
    # check the version
    version = grass.core.version()
    # this is would be set automatically
    if version['version'].find('7.') != -1:
        grassVersion = 'grass%s%s' % (version['version'][0],
                                      version['version'][2])
        session_path = '.grass%s' % version['version'][0]
    else:
        grass.fatal(_('You are not in a GRASS GIS version 7 session'))
        return 0
    # set the path of grassdata/location/mapset
    # set to .grass7 folder
    path = os.path.join(home, session_path, 'g.cloud')
    if not os.path.exists(path):
        os.makedirs(path)
    # set username, password and folder if settings are inserted by stdin
    if options['config'] == '-':
        # NOTE(review): this span was corrupted in the source (masked with
        # '******'); reconstructed from the surrounding logic: prompt for
        # the username on stdin and read the password without echo.
        import getpass
        user = raw_input(_('Insert username: '))
        passwd = getpass.getpass(_('Insert password: '))
    # otherwise read username and password from the settings file
    else:
        if not os.path.exists(options['config']):
            grass.fatal(_('The file %s doesn\'t exist' % options['config']))
        # refuse settings files readable by other users (must be mode 600)
        if stat.S_IMODE(os.stat(options['config']).st_mode) == int('0600', 8):
            filesett = open(options['config'], 'r')
            fileread = filesett.readlines()
            user = fileread[0].strip()
            passwd = fileread[1].strip()
            filesett.close()
        else:
            err = 'The file permissions of %s are considered insecure.\n' % options[
                'config']
            # BUG FIX: the original re-assigned `err` here, discarding the
            # first sentence of the message; append instead
            err += 'Please correct permissions to read/write only for user (mode 600)'
            grass.fatal(_(err))
            return 0
    # server option
    server = options['server']
    # lazy import
    import cloud_ssh as sshs
    # create the sshs session
    ssh_conn = sshs.ssh_session(user, server, session_path, passwd)
    if flags['a']:
        ssh_conn.add()
    # check if the server has grass and qsub installed, and return the home
    if options['path'] == '$HOME':
        serverHome = serverCheck(ssh_conn, grassVersion)
    else:
        serverHome = options['path']
    if options['reconnect']:
        reconnect(ssh_conn, options['reconnect'], path, variables, serverHome)
    else:
        if options['grass_script']:
            if not os.path.exists(options['grass_script']):
                grass.fatal(
                    _("File %s does not exists" % options['grass_script']))
            else:
                grassScript = options['grass_script']
                nameGrassScript = os.path.split(grassScript)[-1]
        else:
            grass.fatal(_("You have to set %s option") % 'grass_script')
        if options['qsub_script']:
            if not os.path.exists(options['qsub_script']):
                grass.fatal(
                    _("File %s does not exists" % options['qsub_script']))
            else:
                qsubScript = options['qsub_script']
                nameQsubScript = os.path.split(qsubScript)[-1]
        else:
            grass.fatal(_("You have to set %s option") % 'qsub_script')
        # the pid of process to have unique value
        pid = os.path.split(tempfile.mkstemp()[1])[-1]
        # name for the unique folder
        serverFolder = os.path.join(serverHome, 'gcloud%s' % pid)
        ssh_conn.ssh('mkdir %s' % serverFolder)
        serverGISDBASE = os.path.split(variables['GISDBASE'])[-1] + str(pid)
        permanent = os.path.join(variables['GISDBASE'],
                                 variables['LOCATION_NAME'], 'PERMANENT')
        # create the new path for $home/GRASSDATA/LOCATION/PERMANENT on the server
        new_perm = os.path.join(serverHome, serverGISDBASE,
                                variables['LOCATION_NAME'], 'PERMANENT')
        ssh_conn.ssh('mkdir -p %s' % new_perm)
        # ship only the location metadata, not the full PERMANENT mapset
        tar = tarfile.open("PERMANENT.tar.gz", "w:gz")
        tar.add(os.path.join(permanent, 'PROJ_INFO'), 'PROJ_INFO')
        tar.add(os.path.join(permanent, 'PROJ_UNITS'), 'PROJ_UNITS')
        tar.add(os.path.join(permanent, 'PROJ_EPSG'), 'PROJ_EPSG')
        tar.add(os.path.join(permanent, 'DEFAULT_WIND'), 'DEFAULT_WIND')
        tar.add(os.path.join(permanent, 'WIND'), 'WIND')
        if os.path.isfile(os.path.join(permanent, 'VAR')):
            tar.add(os.path.join(permanent, 'VAR'), 'VAR')
        tar.close()
        ssh_conn.scp('PERMANENT.tar.gz', serverHome)
        ssh_conn.ssh('tar -C %s -xzf PERMANENT.tar.gz' % new_perm)
        ssh_conn.ssh('rm -f PERMANENT.tar.gz')
        os.remove('PERMANENT.tar.gz')

        if options['raster'] != '':
            rasters = options['raster'].split(',')
            copyMaps(ssh_conn, rasters, 'raster', serverFolder)
        if options['vector'] != '':
            rasters = options['vector'].split(',')
            copyMaps(ssh_conn, rasters, 'vector', serverFolder)
        # copy the scripts to the server
        tar = tarfile.open("script_gcloud.tar.gz", "w:gz")
        if options['raster'] != '' or options['vector'] != '':
            tar.add(os.path.join(cloudpath, 'cloud_unpack.py'),
                    'cloud_unpack.py')
            tar.add(os.path.join(cloudpath, 'cloud_which.py'),
                    'cloud_which.py')
        tar.add(os.path.join(cloudpath, 'cloud_collect.sh'),
                'cloud_collect.sh')
        tar.add(os.path.join(cloudpath, 'cloud_mail.sh'), 'cloud_mail.sh')
        tar.add(grassScript, nameGrassScript)
        tar.add(qsubScript, nameQsubScript)
        tar.close()
        ssh_conn.scp("script_gcloud.tar.gz", serverHome)
        ssh_conn.ssh('tar -C %s -xzf script_gcloud.tar.gz' % serverFolder)
        ssh_conn.ssh('rm -f script_gcloud.tar.gz')
        os.remove('script_gcloud.tar.gz')
        # unpack the transferred maps into the remote database
        if options['raster'] != '' or options['vector'] != '':
            grass.debug(
                "Launching cloud_unpack.py with this parameters: %s, %s, %s" %
                (serverFolder, python, new_perm),
                debug=2)
            ssh_conn.ssh('"cd %s ; %s cloud_unpack.py %s"' %
                         (serverFolder, python, new_perm))
        qsubid = os.path.join(serverFolder, 'tmpqsub')
        grass.debug("The pid of job is %s" % (str(pid)), debug=2)
        if options['variables'] != '':
            # one qsub job per combination of the user-supplied variables
            vari = ast.literal_eval(options['variables'])
            values = vari.values()
            keys = vari.keys()
            if flags['c']:
                values = variablesCheckCicle(values)
            else:
                values = variablesCheck(values)
            njobs = 0
            for val in range(len(values)):
                launchstr = '"cd %s ; qsub -v MYPID=%s -v MYLD_LIBRARY_PATH=$LD_LIBRARY_PATH ' \
                  '-v GRASSDBASE=%s -v MYLOC=%s -v GRASSCRIPT=%s' % (
                  serverFolder, pid, os.path.join(serverHome, serverGISDBASE),
                  variables['LOCATION_NAME'], os.path.join(serverFolder,nameGrassScript)
                )
                for k in range(len(keys)):
                    launchstr += ' -v %s=%s' % (str(
                        keys[k]), str(values[val][k]))
                launchstr += ' %s >> %s' % (os.path.join(
                    serverFolder, nameQsubScript), qsubid)
                ssh_conn.ssh(launchstr)
                njobs += 1
            grass.message(_('Launching %i jobs...' % njobs))
        else:
            launchstr = 'cd %s ; qsub -v MYPID=%s -v MYLD_LIBRARY_PATH=$LD_LIBRARY_PATH ' \
             '-v GRASSDBASE=%s -v MYLOC=%s -v GRASSCRIPT=%s %s > %s' % (
             serverFolder, pid, os.path.join(serverHome, serverGISDBASE),
             variables['LOCATION_NAME'], os.path.join(serverFolder,nameGrassScript),
             os.path.join(serverFolder, nameQsubScript), qsubid
            )
            ssh_conn.ssh(launchstr)
            grass.message(_('Launching a single job...'))

        # sentinel values understood by cloud_collect.sh: "NOOO" means
        # "no mail" / "keep data" respectively
        if options['mail']:
            mail = options['mail']
        else:
            mail = "NOOO"
        if flags['k']:
            remove = "NOOO"
        else:
            remove = "yes"
        # collect the job ids so the collector can wait on all of them
        ids = ssh_conn.ssh(
            "cat %s | cut -d' ' -f3 | tr '\n' ',' | sed 's+,$++g'" % qsubid)
        collectstr = "\"cd %s ; qsub -v MYPID=%s -v MYLD_LIBRARY_PATH=$LD_LIBRARY_PATH " % (
            serverFolder, pid)
        collectstr += "-hold_jid %s %s %s %s %s %s %s\"" % (
            ids, os.path.join(serverFolder, 'cloud_collect.sh'),
            os.path.join(serverHome, serverGISDBASE),
            variables['LOCATION_NAME'], mail, remove, pid)
        ssh_conn.ssh(collectstr)
        grass.message(
            _('If you want to reconnect to this job to see its status please use the reconnect options with this value: %s'
              % pid))
        grass.message(
            _('   g.cloud config=path|- server=host reconnect=%s' % pid))
    ssh_conn.close()
Beispiel #42
0
def main():
    """Cyclically compute occurrence (and optionally indicator) space
    time raster datasets from an input STRDS.

    For each cycle between *start* and *stop* the registered input maps
    are analysed; occurrence maps are produced by ``compute_occurrence``
    (defined elsewhere in this file) and, if requested, indicator maps
    mark the start/intermediate/end phases of each occurrence run.

    All parameters come from the global ``options``/``flags`` dicts.
    Side effects: creates raster maps via r.mapcalc and registers them
    in the temporal database.
    """
    # Get the options
    input = options["input"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    offset = options["offset"]
    minimum = options["minimum"]
    maximum = options["maximum"]
    occurrence = options["occurrence"]
    range = options["range"]   # NOTE: shadows the builtin; kept for compatibility
    indicator = options["indicator"]
    staend = options["staend"]
    register_null = flags["n"]
    reverse = flags["r"]
    time_suffix = options["suffix"]

    grass.set_raise_on_error(True)

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    # Fully qualify the input dataset name
    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)

    if input_strds.is_in_db() == False:
        dbif.close()
        grass.fatal(_("Space time %s dataset <%s> not found") % (
            input_strds.get_output_map_instance(None).get_type(), id))

    input_strds.select(dbif)
    dummy = input_strds.get_new_map_instance(None)

    # The occurrence space time raster dataset
    if occurrence:
        # A numeric range is required unless min/max threshold STRDS are given
        if not minimum or not maximum:
            if not range:
                dbif.close()
                grass.fatal(_("You need to set the range to compute the occurrence"
                              " space time raster dataset"))

        if occurrence.find("@") >= 0:
            occurrence_id = occurrence
        else:
            occurrence_id = occurrence + "@" + mapset

        occurrence_strds = tgis.SpaceTimeRasterDataset(occurrence_id)
        if occurrence_strds.is_in_db(dbif):
            if not grass.overwrite():
                dbif.close()
                grass.fatal(_("Space time raster dataset <%s> is already in the "
                              "database, use overwrite flag to overwrite") % occurrence_id)

    # The indicator space time raster dataset
    if indicator:
        if not occurrence:
            dbif.close()
            grass.fatal(_("You need to set the occurrence to compute the indicator"
                          " space time raster dataset"))
        if not staend:
            dbif.close()
            grass.fatal(_("You need to set the staend options to compute the indicator"
                          " space time raster dataset"))
        if indicator.find("@") >= 0:
            # BUG FIX: the original assigned `indicator = indicator` here,
            # leaving indicator_id undefined for fully qualified names
            indicator_id = indicator
        else:
            indicator_id = indicator + "@" + mapset

        indicator_strds = tgis.SpaceTimeRasterDataset(indicator_id)
        if indicator_strds.is_in_db(dbif):
            if not grass.overwrite():
                dbif.close()
                grass.fatal(_("Space time raster dataset <%s> is already in the "
                              "database, use overwrite flag to overwrite") % indicator_id)
        staend = staend.split(",")
        indicator_start = int(staend[0])
        indicator_mid = int(staend[1])
        indicator_end = int(staend[2])

    # The minimum threshold space time raster dataset
    minimum_strds = None
    if minimum:
        if minimum.find("@") >= 0:
            minimum_id = minimum
        else:
            minimum_id = minimum + "@" + mapset

        minimum_strds = tgis.SpaceTimeRasterDataset(minimum_id)
        if minimum_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> not found") % (minimum_strds.get_id()))

        if minimum_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Temporal type of input strds and minimum strds must be equal"))

        minimum_strds.select(dbif)

    # The maximum threshold space time raster dataset
    maximum_strds = None
    if maximum:
        if maximum.find("@") >= 0:
            maximum_id = maximum
        else:
            maximum_id = maximum + "@" + mapset

        maximum_strds = tgis.SpaceTimeRasterDataset(maximum_id)
        if maximum_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> not found") % (maximum_strds.get_id()))

        if maximum_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Temporal type of input strds and maximum strds must be equal"))

        maximum_strds.select(dbif)

    input_strds_start, input_strds_end = input_strds.get_temporal_extent_as_tuple()

    # Convert start/stop into the dataset's time representation
    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle

    def _advance(cur_end):
        """Return (start, end) of the cycle following cur_end, honoring offset."""
        if input_strds.is_time_absolute():
            new_start = cur_end
            if offset:
                new_start = tgis.increment_datetime_by_string(cur_end, offset)
            new_end = tgis.increment_datetime_by_string(new_start, cycle)
        else:
            new_start = cur_end
            if offset:
                new_start = cur_end + offset
            new_end = new_start + cycle
        return new_start, new_end

    count = 1
    indi_count = 1
    occurrence_maps = {}
    indicator_maps = {}

    while input_strds_end > start and stop > start:

        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'"%(str(start),
                                                                str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        # BUG FIX: pass a string, not an int, to grass.debug
        grass.debug(str(len(input_maps)))

        input_topo = tgis.SpatioTemporalTopologyBuilder()
        input_topo.build(input_maps, input_maps)

        if len(input_maps) == 0:
            # BUG FIX: advance the cycle before skipping, otherwise an
            # empty interval would loop forever
            start, end = _advance(end)
            continue

        grass.message(_("Processing cycle %s - %s"%(str(start), str(end))))

        count = compute_occurrence(occurrence_maps, input_strds, input_maps,
                                   start, base, count, time_suffix, mapset,
                                   where, reverse, range, minimum_strds,
                                   maximum_strds, dbif)

        # Indicator computation is based on the occurrence so we need to start it after
        # the occurrence cycle
        if indicator:
            num_maps = len(input_maps)
            for i in xrange(num_maps):
                if reverse:
                    map = input_maps[num_maps - i - 1]
                else:
                    map = input_maps[i]

                # Build the output name with the configured suffix scheme
                if input_strds.get_temporal_type() == 'absolute' and time_suffix == 'gran':
                    suffix = tgis.create_suffix_from_datetime(map.temporal_extent.get_start_time(),
                                                              input_strds.get_granularity())
                    indicator_map_name = "{ba}_indicator_{su}".format(ba=base, su=suffix)
                elif input_strds.get_temporal_type() == 'absolute' and time_suffix == 'time':
                    suffix = tgis.create_time_suffix(map)
                    indicator_map_name = "{ba}_indicator_{su}".format(ba=base, su=suffix)
                else:
                    indicator_map_name = tgis.create_numeric_suffix(base + "_indicator",
                                                                    indi_count, time_suffix)
                indicator_map_id = dummy.build_id(indicator_map_name, mapset)
                indicator_map = input_strds.get_new_map_instance(indicator_map_id)

                # Check if new map is in the temporal database
                if indicator_map.is_in_db(dbif):
                    if grass.overwrite():
                        # Remove the existing temporal database entry
                        indicator_map.delete(dbif)
                        indicator_map = input_strds.get_new_map_instance(indicator_map_id)
                    else:
                        grass.fatal(_("Map <%s> is already registered in the temporal"
                                     " database, use overwrite flag to overwrite.") %
                                    (indicator_map.get_map_id()))

                curr_map = occurrence_maps[map.get_id()].get_name()

                # Reverse time
                if reverse:
                    if i ==  0:
                        prev_map = curr_map
                        subexpr1 = "null()"
                        subexpr3 = "%i"%(indicator_start)
                    elif i > 0 and i < num_maps - 1:
                        prev_map = occurrence_maps[map.next().get_id()].get_name()
                        next_map = occurrence_maps[map.prev().get_id()].get_name()
                        # In case the previous map is null() set null() or the start indicator
                        subexpr1 = "if(isnull(%s), null(), %i)"%(curr_map, indicator_start)
                        # In case the previous map was not null() if the current map is null() set null()
                        # if the current map is not null() and the next map is not null() set
                        # intermediate indicator, if the next map is null set the end indicator
                        subexpr2 = "if(isnull(%s), %i, %i)"%(next_map, indicator_end, indicator_mid)
                        subexpr3 = "if(isnull(%s), null(), %s)"%(curr_map, subexpr2)
                        expression = "%s = if(isnull(%s), %s, %s)"%(indicator_map_name,
                                                                    prev_map, subexpr1,
                                                                    subexpr3)
                    else:
                        prev_map = occurrence_maps[map.next().get_id()].get_name()
                        subexpr1 = "if(isnull(%s), null(), %i)"%(curr_map, indicator_start)
                        subexpr3 = "if(isnull(%s), null(), %i)"%(curr_map, indicator_mid)
                else:
                    if i == 0:
                        prev_map = curr_map
                        subexpr1 = "null()"
                        subexpr3 = "%i"%(indicator_start)
                    elif i > 0 and i < num_maps - 1:
                        prev_map = occurrence_maps[map.prev().get_id()].get_name()
                        next_map = occurrence_maps[map.next().get_id()].get_name()
                        # In case the previous map is null() set null() or the start indicator
                        subexpr1 = "if(isnull(%s), null(), %i)"%(curr_map, indicator_start)
                        # In case the previous map was not null() if the current map is null() set null()
                        # if the current map is not null() and the next map is not null() set
                        # intermediate indicator, if the next map is null set the end indicator
                        subexpr2 = "if(isnull(%s), %i, %i)"%(next_map, indicator_end, indicator_mid)
                        subexpr3 = "if(isnull(%s), null(), %s)"%(curr_map, subexpr2)
                        expression = "%s = if(isnull(%s), %s, %s)"%(indicator_map_name,
                                                                    prev_map, subexpr1,
                                                                    subexpr3)
                    else:
                        prev_map = occurrence_maps[map.prev().get_id()].get_name()
                        subexpr1 = "if(isnull(%s), null(), %i)"%(curr_map, indicator_start)
                        subexpr3 = "if(isnull(%s), null(), %i)"%(curr_map, indicator_mid)

                expression = "%s = if(isnull(%s), %s, %s)"%(indicator_map_name,
                                                            prev_map, subexpr1,
                                                            subexpr3)
                grass.debug(expression)
                grass.mapcalc(expression, overwrite=True)

                map_start, map_end = map.get_temporal_extent_as_tuple()

                if map.is_time_absolute():
                    indicator_map.set_absolute_time(map_start, map_end)
                else:
                    indicator_map.set_relative_time(map_start, map_end,
                                                 map.get_relative_time_unit())

                indicator_maps[map.get_id()] = indicator_map
                indi_count += 1

        # Increment the cycle
        start, end = _advance(end)

    empty_maps = []

    # NOTE(review): occurrence_strds is only defined when the occurrence
    # option is set — presumably that option is required by the module
    # interface; verify against the parser definition.
    create_strds_register_maps(input_strds, occurrence_strds, occurrence_maps,
                               register_null, empty_maps, dbif)

    if indicator:
        create_strds_register_maps(input_strds, indicator_strds, indicator_maps,
                                   register_null, empty_maps, dbif)

    dbif.close()

    # Remove empty maps
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove", flags='f', type="raster",  name=map.get_name(), quiet=True)
Beispiel #43
0
    def _download(self):
        """!Downloads data from WMS server using GDAL WMS driver

        @return temp_map with stored downloaded data
        """
        grass.message("Downloading data from WMS server...")

        # GDAL WMS driver does not flip geographic coordinates
        # according to WMS standard 1.3.0.
        if (
            "+proj=latlong" in self.proj_srs or "+proj=longlat" in self.proj_srs
        ) and self.params["wms_version"] == "1.3.0":
            grass.warning(
                _(
                    "If module will not be able to fetch the data in this "
                    + "geographic projection, \n try 'WMS_GRASS' driver or use WMS version 1.1.1."
                )
            )

        self._debug("_download", "started")
        temp_map = self._tempfile()

        xml_file = self._createXML()

        # print xml file content for debug level 1
        # use a context manager so the handle is closed even on error
        with open(xml_file, "r") as xml_fd:
            grass.debug("WMS request XML:\n%s" % xml_fd.read(), 1)

        if self.proxy:
            gdal.SetConfigOption("GDAL_HTTP_PROXY", str(self.proxy))
        if self.proxy_user_pw:
            gdal.SetConfigOption("GDAL_HTTP_PROXYUSERPWD", str(self.proxy_user_pw))
        wms_dataset = gdal.Open(xml_file, gdal.GA_ReadOnly)
        grass.try_remove(xml_file)
        if wms_dataset is None:
            grass.fatal(_("Unable to open GDAL WMS driver"))

        self._debug("_download", "GDAL dataset created")

        driver = gdal.GetDriverByName(self.gdal_drv_format)
        if driver is None:
            # BUG FIX: the original referenced the builtin `format` and
            # interpolated inside _(); report the requested driver format
            grass.fatal(_("Unable to find %s driver") % self.gdal_drv_format)

        metadata = driver.GetMetadata()
        if (
            gdal.DCAP_CREATECOPY not in metadata
            or metadata[gdal.DCAP_CREATECOPY] == "NO"
        ):
            # BUG FIX: the original message said "supports", inverting the
            # meaning of this error condition
            grass.fatal(
                _("Driver %s does not support CreateCopy() method.") % self.gdal_drv_name
            )

        self._debug("_download", "calling GDAL CreateCopy...")

        temp_map_dataset = driver.CreateCopy(temp_map, wms_dataset, 0)

        if temp_map_dataset is None:
            grass.fatal(_("Incorrect WMS query"))

        # release GDAL datasets so data is flushed to temp_map
        temp_map_dataset = None
        wms_dataset = None

        self._debug("_download", "finished")

        return temp_map
Beispiel #44
0
def main():

    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    global tile, tmpdir, in_temp, currdir, tmpregionname

    in_temp = False

    url = options['url']
    username = options['username']
    password = options['password']
    local = options['local']
    output = options['output']
    memory = options['memory']
    fillnulls = flags['n']
    srtmv3 = (flags['2'] == 0)
    one = flags['1']
    dozerotile = flags['z']
    reproj_res = options['resolution']

    overwrite = grass.overwrite()

    res = '00:00:03'
    if srtmv3:
        fillnulls = 0
        if one:
            res = '00:00:01'
    else:
        one = None

    if len(local) == 0:
        if len(url) == 0:
            if srtmv3:
                if one:
                    url = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/'
                else:
                    url = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11/'
            else:
                url = 'http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/'

    if len(local) == 0:
        local = None

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)

    if fillnulls == 1 and memory <= 0:
        grass.warning(
            _("Amount of memory to use for interpolation must be positive, setting to 300 MB"
              ))
        memory = '300'

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    currdir = os.getcwd()
    pid = os.getpid()

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True
    if local is None:
        local = tmpdir

    # save region
    tmpregionname = 'r_in_srtm_tmp_region'
    grass.run_command('g.region', save=tmpregionname, overwrite=overwrite)

    # get extents
    if kv['+proj'] == 'longlat':
        reg = grass.region()
    else:
        if not options['resolution']:
            grass.fatal(
                _("The <resolution> must be set if the projection is not 'longlat'."
                  ))
        reg2 = grass.parse_command('g.region', flags='uplg')
        north = [float(reg2['ne_lat']), float(reg2['nw_lat'])]
        south = [float(reg2['se_lat']), float(reg2['sw_lat'])]
        east = [float(reg2['ne_long']), float(reg2['se_long'])]
        west = [float(reg2['nw_long']), float(reg2['sw_long'])]
        reg = {}
        if np.mean(north) > np.mean(south):
            reg['n'] = max(north)
            reg['s'] = min(south)
        else:
            reg['n'] = min(north)
            reg['s'] = max(south)
        if np.mean(west) > np.mean(east):
            reg['w'] = max(west)
            reg['e'] = min(east)
        else:
            reg['w'] = min(west)
            reg['e'] = max(east)
        # get actual location, mapset, ...
        grassenv = grass.gisenv()
        tgtloc = grassenv['LOCATION_NAME']
        tgtmapset = grassenv['MAPSET']
        GISDBASE = grassenv['GISDBASE']
        TGTGISRC = os.environ['GISRC']

    if kv['+proj'] != 'longlat':
        SRCGISRC, TMPLOC = createTMPlocation()
    if options['region'] is None or options['region'] == '':
        north = reg['n']
        south = reg['s']
        east = reg['e']
        west = reg['w']
    else:
        west, south, east, north = options['region'].split(',')
        west = float(west)
        south = float(south)
        east = float(east)
        north = float(north)

    # adjust extents to cover SRTM tiles: 1 degree bounds
    tmpint = int(north)
    if tmpint < north:
        north = tmpint + 1
    else:
        north = tmpint

    tmpint = int(south)
    if tmpint > south:
        south = tmpint - 1
    else:
        south = tmpint

    tmpint = int(east)
    if tmpint < east:
        east = tmpint + 1
    else:
        east = tmpint

    tmpint = int(west)
    if tmpint > west:
        west = tmpint - 1
    else:
        west = tmpint

    if north == south:
        north += 1
    if east == west:
        east += 1

    rows = abs(north - south)
    cols = abs(east - west)
    ntiles = rows * cols
    grass.message(_("Importing %d SRTM tiles...") % ntiles, flag='i')
    counter = 1

    srtmtiles = ''
    valid_tiles = 0
    for ndeg in range(south, north):
        for edeg in range(west, east):
            grass.percent(counter, ntiles, 1)
            counter += 1
            if ndeg < 0:
                tile = 'S'
            else:
                tile = 'N'
            tile = tile + '%02d' % abs(ndeg)
            if edeg < 0:
                tile = tile + 'W'
            else:
                tile = tile + 'E'
            tile = tile + '%03d' % abs(edeg)
            grass.debug("Tile: %s" % tile, debug=1)

            if local != tmpdir:
                gotit = import_local_tile(tile, local, pid, srtmv3, one)
            else:
                gotit = download_tile(tile, url, pid, srtmv3, one, username,
                                      password)
                if gotit == 1:
                    gotit = import_local_tile(tile, tmpdir, pid, srtmv3, one)
            if gotit == 1:
                grass.verbose(_("Tile %s successfully imported") % tile)
                valid_tiles += 1
            elif dozerotile:
                # create tile with zeros
                if one:
                    # north
                    if ndeg < -1:
                        tmpn = '%02d:59:59.5S' % (abs(ndeg) - 2)
                    else:
                        tmpn = '%02d:00:00.5N' % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = '%02d:00:00.5S' % abs(ndeg)
                    else:
                        tmps = '%02d:59:59.5N' % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = '%03d:59:59.5W' % (abs(edeg) - 2)
                    else:
                        tmpe = '%03d:00:00.5E' % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = '%03d:00:00.5W' % abs(edeg)
                    else:
                        tmpw = '%03d:59:59.5E' % (edeg - 1)
                else:
                    # north
                    if ndeg < -1:
                        tmpn = '%02d:59:58.5S' % (abs(ndeg) - 2)
                    else:
                        tmpn = '%02d:00:01.5N' % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = '%02d:00:01.5S' % abs(ndeg)
                    else:
                        tmps = '%02d:59:58.5N' % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = '%03d:59:58.5W' % (abs(edeg) - 2)
                    else:
                        tmpe = '%03d:00:01.5E' % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = '%03d:00:01.5W' % abs(edeg)
                    else:
                        tmpw = '%03d:59:58.5E' % (edeg - 1)

                grass.run_command('g.region',
                                  n=tmpn,
                                  s=tmps,
                                  e=tmpe,
                                  w=tmpw,
                                  res=res)
                grass.run_command('r.mapcalc',
                                  expression="%s = 0" %
                                  (tile + '.r.in.srtm.tmp.' + str(pid)),
                                  quiet=True)
                grass.run_command('g.region', region=tmpregionname)

    # g.list with sep = comma does not work ???
    pattern = '*.r.in.srtm.tmp.%d' % pid
    srtmtiles = grass.read_command('g.list',
                                   type='raster',
                                   pattern=pattern,
                                   sep='newline',
                                   quiet=True)

    srtmtiles = srtmtiles.splitlines()
    srtmtiles = ','.join(srtmtiles)
    grass.debug("'List of Tiles: %s" % srtmtiles, debug=1)

    if valid_tiles == 0:
        grass.run_command('g.remove',
                          type='raster',
                          name=str(srtmtiles),
                          flags='f',
                          quiet=True)
        grass.warning(_("No tiles imported"))
        if local != tmpdir:
            grass.fatal(
                _("Please check if local folder <%s> is correct.") % local)
        else:
            grass.fatal(
                _("Please check internet connection, credentials, and if url <%s> is correct."
                  ) % url)

    grass.run_command('g.region', raster=str(srtmtiles))

    grass.message(_("Patching tiles..."))
    if fillnulls == 0:
        if valid_tiles > 1:
            if kv['+proj'] != 'longlat':
                grass.run_command('r.buildvrt', input=srtmtiles, output=output)
            else:
                grass.run_command('r.patch', input=srtmtiles, output=output)
        else:
            grass.run_command('g.rename',
                              raster='%s,%s' % (srtmtiles, output),
                              quiet=True)
    else:
        ncells = grass.region()['cells']
        if long(ncells) > 1000000000:
            grass.message(
                _("%s cells to interpolate, this will take some time") %
                str(ncells),
                flag='i')
        if kv['+proj'] != 'longlat':
            grass.run_command('r.buildvrt',
                              input=srtmtiles,
                              output=output + '.holes')
        else:
            grass.run_command('r.patch',
                              input=srtmtiles,
                              output=output + '.holes')
        mapstats = grass.parse_command('r.univar',
                                       map=output + '.holes',
                                       flags='g',
                                       quiet=True)
        if mapstats['null_cells'] == '0':
            grass.run_command('g.rename',
                              raster='%s,%s' % (output + '.holes', output),
                              quiet=True)
        else:
            grass.run_command('r.resamp.bspline',
                              input=output + '.holes',
                              output=output + '.interp',
                              se='0.0025',
                              sn='0.0025',
                              method='linear',
                              memory=memory,
                              flags='n')
            grass.run_command('r.patch',
                              input='%s,%s' %
                              (output + '.holes', output + '.interp'),
                              output=output + '.float',
                              flags='z')
            grass.run_command('r.mapcalc',
                              expression='%s = round(%s)' %
                              (output, output + '.float'))
            grass.run_command(
                'g.remove',
                type='raster',
                name='%s,%s,%s' %
                (output + '.holes', output + '.interp', output + '.float'),
                flags='f',
                quiet=True)

    # switch to target location
    if kv['+proj'] != 'longlat':
        os.environ['GISRC'] = str(TGTGISRC)
        # r.proj
        grass.message(_("Reprojecting <%s>...") % output)
        kwargs = {
            'location': TMPLOC,
            'mapset': 'PERMANENT',
            'input': output,
            'memory': memory,
            'resolution': reproj_res
        }
        if options['method']:
            kwargs['method'] = options['method']
        try:
            grass.run_command('r.proj', **kwargs)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % output)
    else:
        if fillnulls != 0:
            grass.run_command('g.remove',
                              type='raster',
                              pattern=pattern,
                              flags='f',
                              quiet=True)

    # nice color table
    grass.run_command('r.colors', map=output, color='srtm', quiet=True)

    # write metadata:
    tmphist = grass.tempfile()
    f = open(tmphist, 'w+')
    f.write(os.environ['CMDLINE'])
    f.close()
    if srtmv3:
        source1 = 'SRTM V3'
    else:
        source1 = 'SRTM V2.1'
    grass.run_command('r.support',
                      map=output,
                      loadhistory=tmphist,
                      description='generated by r.in.srtm.region',
                      source1=source1,
                      source2=(local if local != tmpdir else url))
    grass.try_remove(tmphist)

    grass.message(_("Done: generated map <%s>") % output)
Beispiel #45
0
def zerofy_and_normalise_component(components, threshhold, output_name):
    """
    Sums up all maps listed in the given "components" object and derives a
    normalised output.

    To Do:

    * Improve `threshold` handling. What if threshholding is not desired? How
    to skip performing it?

    Parameters
    ----------
    components :
        Input list of raster maps (components); must be non-empty

    threshhold :
        Reference value for which to flatten all smaller raster pixel values to
        zero

    output_name :
        Name of output raster map

    Returns
    -------
    None
        The normalised map is written as `output_name` via normalize_map()

    Examples
    --------
    ...
    """
    # guard: an empty list would otherwise leave 'tmp_intermediate' unbound
    # below and crash with an UnboundLocalError
    if not components:
        grass.fatal(_("No component maps given to sum and normalise"))

    msg = " * Normalising sum of: "
    msg += ",".join(components)
    grass.debug(_(msg))
    grass.verbose(_(msg))

    if len(components) > 1:

        # strip any mapset suffix and derive a name for the temporary maps
        components = [name.split("@")[0] for name in components]
        components_string = SPACY_PLUS.join(components)
        components_string = components_string.replace(" ", "")
        components_string = components_string.replace("+", "_")

        # temporary map names
        tmp_intermediate = temporary_filename(filename=components_string)
        tmp_output = temporary_filename(filename=components_string)

        # build and run the mapcalc expression summing all components
        component_expression = SPACY_PLUS.join(components)
        component_equation = EQUATION.format(result=tmp_intermediate,
                                             expression=component_expression)

        grass.mapcalc(component_equation, overwrite=True)

    else:
        # single component: use it directly as the intermediate map
        tmp_intermediate = components[0]
        tmp_output = temporary_filename(filename=tmp_intermediate)

    if threshhold > THRESHHOLD_ZERO:
        msg = " * Setting values < {threshhold} in '{raster}' to zero"
        grass.verbose(
            msg.format(threshhold=threshhold, raster=tmp_intermediate))
        zerofy_small_values(tmp_intermediate, threshhold, tmp_output)

    else:
        tmp_output = tmp_intermediate

    grass.debug(_("Output map name: {name}".format(name=output_name)))

    normalize_map(tmp_output, output_name)
Beispiel #46
0
def main():
    """Join attribute columns from another table into a vector map table."""
    vect = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['other_table']
    ocolumn = options['other_column']
    scolumns = (options['subset_columns'].split(',')
                if options['subset_columns'] else None)

    try:
        conn = grass.vector_layer_db(vect, layer)
    except CalledModuleError:
        sys.exit(1)

    maptable = conn['table']
    database = conn['database']
    driver = conn['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(
            _("There is no table connected to this map. Unable to join any column."
              ))

    # the join column must exist in the vector map's own table
    if column not in grass.vector_columns(vect, layer):
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (column, maptable))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver=driver,
                                    database=database)['cols']

    # the join column must also exist in the other table
    if ocolumn not in (ocol[0] for ocol in all_cols_ot):
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine which columns of the other table will be transferred
    if not scolumns:
        # no subset requested: take every column of the other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        for scol in scolumns:
            # keep the FIRST matching description, as reported by db.describe
            match = next((c for c in all_cols_ot if c[0] == scol), None)
            if match is not None:
                cols_to_add.append(match)
            else:
                grass.warning(
                    _("Column <%s> not found in table <%s>") % (scol, otable))

    all_cols_tt = grass.vector_columns(vect, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in cols_to_add:
        colname = col[0]
        # skip the vector column which is used for join
        if colname == column:
            continue

        # keep the length/precision specifier only where the backend wants it
        use_len = len(col) > 2
        # Sqlite 3 does not support the precision number any more
        if driver == "sqlite":
            use_len = False
        # MySQL - expect format DOUBLE PRECISION(M,D), see #2792
        elif driver == "mysql" and col[1] == 'DOUBLE PRECISION':
            use_len = False

        coltype = "%s(%s)" % (col[1], col[2]) if use_len else "%s" % col[1]
        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table
        if colname not in all_cols_tt:
            try:
                grass.run_command('v.db.addcolumn',
                                  map=vect,
                                  columns=colspec,
                                  layer=layer)
            except CalledModuleError:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table=maptable,
                                   column=column,
                                   otable=otable,
                                   ocolumn=ocolumn,
                                   colname=colname)
        grass.debug(stmt, 1)
        grass.verbose(
            _("Updating column <%s> of vector map <%s>...") % (colname, vect))
        try:
            grass.write_command('db.execute',
                                stdin=stmt,
                                input='-',
                                database=database,
                                driver=driver)
        except CalledModuleError:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(vect)

    return 0
Beispiel #47
0
def compute_artificial_proximity(raster,
                                 distance_categories,
                                 output_name=None):
    """
    Compute proximity to artificial surfaces

    1. Distance to features
    2. Classify distances

    Parameters
    ----------
    raster :
        Name of input raster map

    distance_categories :
        Category rules to recode the input distance map

    output_name :
        Name to pass to temporary_filename() to create a temporary map name

    Returns
    -------
    tmp_output :
        Name of the temporary output map for internal, in-script, re-use

    Examples
    --------
    ...
    """
    artificial_distances = temporary_filename(filename=raster)

    # derive per-cell Euclidean distance to the input features
    grass.run_command(
        "r.grow.distance",
        input=raster,
        distance=artificial_distances,
        metric=EUCLIDEAN,
        quiet=True,
        overwrite=True,
    )

    # temporary maps will be removed
    if output_name:
        tmp_output = temporary_filename(filename=output_name)
        grass.debug(
            _("Pre-defined output map name {name}".format(name=tmp_output)))

    else:
        tmp_output = temporary_filename(filename="artificial_proximity")
        grass.debug(
            _("Hardcoded temporary map name {name}".format(name=tmp_output)))

    msg = "Computing proximity to '{mapname}'"
    msg = msg.format(mapname=raster)
    grass.verbose(_(msg))
    # reclassify distances into proximity categories
    grass.run_command(
        "r.recode",
        input=artificial_distances,
        output=tmp_output,
        rules=distance_categories,
        overwrite=True,
    )

    # verify that the recoded map was actually created
    output = grass.find_file(name=tmp_output, element="cell")
    if not output["file"]:
        # BUGFIX: report the missing OUTPUT map, not the input 'raster'
        grass.fatal(
            _("Proximity map {name} not created!".format(name=tmp_output)))

    return tmp_output
def main():
    """Enhance the color tables of an RGB raster triplet for display."""
    red = options['red']
    green = options['green']
    blue = options['blue']
    brightness = options['strength']
    full = flags['f']
    preserve = flags['p']
    reset = flags['r']

    global do_mp

    if flags['s']:
        do_mp = False

    bands = (red, green, blue)

    # 90 or 98? MAX value controls brightness
    # think of percent (0-100), must be positive or 0
    # must be more than "2" ?

    if full:
        for band in bands:
            gscript.run_command('r.colors', map=band, color='grey', quiet=True)
        sys.exit(0)

    if reset:
        for band in bands:
            gscript.run_command('r.colors', map=band, color='grey255', quiet=True)
        sys.exit(0)

    if not preserve:
        if do_mp:
            gscript.message(_("Processing..."))
            # spawn one percentile worker per band
            workers = {}
            pipes = {}
            for band in bands:
                pipes[band] = mp.Pipe()
                workers[band] = mp.Process(target=get_percentile_mp,
                                           args=(
                                               band,
                                               ['2', brightness],
                                               pipes[band],
                                           ))
                workers[band].start()
            gscript.percent(1, 2, 1)

            # harvest each result, wait for the worker, apply the colors
            for band in bands:
                recv_end = pipes[band][1]
                (lo, hi) = recv_end.recv()
                gscript.debug('parent (%s) (%.1f, %.1f)' % (band, lo, hi))
                recv_end.close()
                workers[band].join()
                set_colors(band, lo, hi)
            gscript.percent(1, 1, 1)
        else:
            # sequential fallback (-s flag or multiprocessing disabled)
            for band in bands:
                gscript.message(_("Processing..."))
                (lo, hi) = get_percentile(band, ['2', brightness])
                gscript.debug("<%s>:  min=%f   max=%f" % (band, lo, hi))
                set_colors(band, lo, hi)

    else:
        # -p: preserve relative colors by stretching all bands to the same
        # global minimum and maximum
        all_max = 0
        all_min = 999999

        if do_mp:
            gscript.message(_("Processing..."))
            workers = {}
            pipes = {}
            for band in bands:
                pipes[band] = mp.Pipe()
                workers[band] = mp.Process(target=get_percentile_mp,
                                           args=(
                                               band,
                                               ['2', brightness],
                                               pipes[band],
                                           ))
                workers[band].start()
            gscript.percent(1, 2, 1)

            # harvest results, accumulating the global min/max
            for band in bands:
                recv_end = pipes[band][1]
                (lo, hi) = recv_end.recv()
                gscript.debug('parent (%s) (%.1f, %.1f)' % (band, lo, hi))
                recv_end.close()
                workers[band].join()
                all_min = min(all_min, lo)
                all_max = max(all_max, hi)
            gscript.percent(1, 1, 1)
        else:
            for band in bands:
                gscript.message(_("Processing..."))
                (lo, hi) = get_percentile(band, ['2', brightness])
                gscript.debug("<%s>:  min=%f   max=%f" % (band, lo, hi))
                all_min = min(all_min, lo)
                all_max = max(all_max, hi)

        gscript.debug("all_min=%f   all_max=%f" % (all_min, all_max))
        for band in bands:
            set_colors(band, all_min, all_max)

    # write cmd history:
    mapset = gscript.gisenv()['MAPSET']
    for band in bands:
        if gscript.find_file(band)['mapset'] == mapset:
            gscript.raster_history(band)
Beispiel #49
0
def main():
    """Run exponential and/or Ricker population growth models on rasters.

    Reads parameters from the global 'options'/'flags', prepares the initial
    population map (optionally aggregated over population patches), iterates
    the requested model(s) via grass.script.array, and writes the resulting
    population raster map(s).
    """
    ############ DEFINITION CLEANUP TEMPORARY FILES ##############
    # global variables for cleanup
    global tmp_map_rast
    global tmp_map_vect

    tmp_map_rast = []
    tmp_map_vect = []

    ############ PARAMETER INPUT ##############
    # Check for correct input: at least one model output must be requested
    if str(options["exponential_output"]) == "" and str(
            options["ricker_output"]) == "":
        grass.fatal(_("Output name for a model is missing"))

    # Model parameters input: number of time steps to simulate
    t = int(options["timesteps"])

    # If populations patches are provided, aggregate the initial population
    # per patch; otherwise single cell populations are used
    if options["population_patches"]:
        grass.run_command(
            "r.statistics2",
            base=options["population_patches"],
            cover=options["n_initial"],
            method="sum",
            output="n0_tmp_%d" % os.getpid(),
        )
    else:
        grass.run_command("g.copy",
                          raster=options["n_initial"] + "," +
                          "n0_tmp_%d" % os.getpid())

    tmp_map_rast.append("n0_tmp_")

    # Customized rounding function. Round based on a probability (p=digits after decimal point) to avoid "local stable states"
    # def prob_round(x, prec = 0):
    # 	fixup = numpy.sign(x) * 10**prec
    # 	x *= fixup
    # 	if options['seed']:
    # 		numpy.random.seed(seed=int(options['seed']))
    # 	round_func = int(x) + numpy.random.binomial(1,x-int(x))
    # 	return round_func/fixup
    # vprob_round = numpy.vectorize(prob_round)

    ################# Model Definitions #################
    # Model definitions modified from R scripts (http://www.mbr-pwrc.usgs.gov/workshops/unmarked/Rscripts/script-state-space.R)

    def exponential_mod(n0, r, t):
        """Exponential growth: n(t+1) = n(t) * exp(r)."""
        n = n0
        for _step in range(t):
            n = 1.0 * n * numpy.exp(r)
            if flags["i"]:
                # n = vprob_round(n) #function not mature yet (takes partly long time, problems with NaNs)
                n = numpy.round(n)
        return n

    def ricker_mod(n0, r, k, t):
        """Ricker growth with carrying capacity k: n(t+1) = n(t)*exp(r*(1-n/k))."""
        n = n0
        for _step in range(t):
            # ignore invalid-value warnings from cells where k is null/zero
            numpy.seterr(invalid="ignore")
            n = 1.0 * n * numpy.exp(r * (1 - (n / k)))
            numpy.seterr(invalid="warn")
            if flags["i"]:
                # n = vprob_round(n) #function not mature yet (takes partly long time, problems with NaNs)
                n = numpy.round(n)
        return n

    ################# Exponential Model #################
    if options["exponential_output"]:
        # Check for correct input: r must come from exactly one source
        if options["r_exp_value"] and options["r_exp_map"]:
            grass.fatal(_("Provide either fixed value for r or raster map"))

        # Define r
        if options["r_exp_map"]:
            grass.debug(_("r_exp_map provided"))

            if options["population_patches"]:
                grass.run_command(
                    "r.statistics2",
                    base=options["population_patches"],
                    cover=options["r_exp_map"],
                    method="average",
                    output="r_exp_tmp_%d" % os.getpid(),
                )
            else:
                grass.run_command(
                    "g.copy",
                    raster=options["r_exp_map"] + "," +
                    "r_exp_tmp_%d" % os.getpid(),
                )

            tmp_map_rast.append("r_exp_tmp_")

            r = garray.array()
            r.read("r_exp_tmp_%d" % os.getpid())

        elif options["r_exp_value"]:
            r = float(options["r_exp_value"])
        else:
            grass.fatal(_("No r value/map provided for exponential model"))

        # run model
        n0_map = garray.array()
        n0_map.read("n0_tmp_%d" % os.getpid())
        exponential_map = garray.array()
        exponential_map[...] = exponential_mod(n0_map, r, t)
        # BUGFIX: write the exponential result (was 'ricker_map', which is
        # undefined at this point and belongs to the Ricker section below)
        exponential_map.write("exponential_output_tmp_%d" % os.getpid())
        tmp_map_rast.append("exponential_output_tmp_")

        # Retransform in case of patches
        if options["population_patches"]:
            # BUGFIX: keyword names must match the $variables used in the
            # expression (previously 'ricker_output'/'ricker_output_tmp')
            grass.mapcalc(
                "$exponential_output = if($n0,(round(($n0*1.0/$n0_tmp)*$exponential_output_tmp)),null())",
                exponential_output=options["exponential_output"],
                n0=options["n_initial"],
                n0_tmp="n0_tmp_%d" % os.getpid(),
                exponential_output_tmp="exponential_output_tmp_%d" % os.getpid(),
            )

        else:
            grass.mapcalc(
                "$exponential_output = if($n0,$exponential_output_tmp,null())",
                exponential_output=options["exponential_output"],
                n0=options["n_initial"],
                exponential_output_tmp="exponential_output_tmp_%d" %
                os.getpid(),
            )

    ################# Ricker Model #################
    if options["ricker_output"]:
        # Check for correct input: r and k must each come from one source
        if options["r_rick_value"] and options["r_rick_map"]:
            grass.fatal(_("Provide either fixed value for r or raster map"))
        if options["k_value"] and options["k_map"]:
            grass.fatal(
                _("Provide either fixed value for carrying capacity (K) or raster map"
                  ))

        # Define r
        if options["r_rick_map"]:

            if options["population_patches"]:
                grass.run_command(
                    "r.statistics2",
                    base=options["population_patches"],
                    cover=options["r_rick_map"],
                    method="average",
                    output="r_rick_tmp_%d" % os.getpid(),
                )
            else:
                grass.run_command(
                    "g.copy",
                    raster=options["r_rick_map"] + "," +
                    "r_rick_tmp_%d" % os.getpid(),
                )

            tmp_map_rast.append("r_rick_tmp_")

            r = garray.array()
            r.read("r_rick_tmp_%d" % os.getpid())

        elif options["r_rick_value"]:
            r = float(options["r_rick_value"])
        else:
            grass.fatal(_("No r value/map for Ricker model provided"))

        # Define k
        if options["k_map"]:
            if options["population_patches"]:
                grass.run_command(
                    "r.statistics2",
                    base=options["population_patches"],
                    cover=options["k_map"],
                    method="sum",
                    output="k_tmp_%d" % os.getpid(),
                )
            else:
                grass.run_command("g.copy",
                                  raster=options["k_map"] + "," +
                                  "k_tmp_%d" % os.getpid())

            tmp_map_rast.append("k_tmp_")

            k = garray.array()
            k.read("k_tmp_%d" % os.getpid())

        elif options["k_value"]:
            k = float(options["k_value"])
        else:
            grass.fatal(_("No value/map for carrying capacity (k) provided"))

        # run model
        n0_map = garray.array()
        n0_map.read("n0_tmp_%d" % os.getpid())
        ricker_map = garray.array()
        ricker_map[...] = ricker_mod(n0_map, r, k, t)
        ricker_map.write("ricker_output_tmp_%d" % os.getpid())
        tmp_map_rast.append("ricker_output_tmp_")

        # Retransform in case of patches
        if options["population_patches"]:
            grass.mapcalc(
                "$ricker_output = if($n0,(round(($n0*1.0/$n0_tmp)*$ricker_output_tmp)),null())",
                ricker_output=options["ricker_output"],
                n0=options["n_initial"],
                n0_tmp="n0_tmp_%d" % os.getpid(),
                ricker_output_tmp="ricker_output_tmp_%d" % os.getpid(),
            )

        else:
            grass.mapcalc(
                "$ricker_output = if($n0,$ricker_output_tmp,null())",
                ricker_output=options["ricker_output"],
                n0=options["n_initial"],
                ricker_output_tmp="ricker_output_tmp_%d" % os.getpid(),
            )

    return 0
Beispiel #50
0
    def _download(self):
        """!Downloads data from WFS server

        @return temp_map with downloaded data
        """
        grass.message(_("Downloading data from WFS server..."))

        # build the GetFeature request URL
        url = self.o_url + (
            "SERVICE=WFS&REQUEST=GetFeature&VERSION=%s&TYPENAME=%s" %
            (self.o_wfs_version, self.o_layers))

        if self.bbox:
            if self.flip_coords:
                # flip coordinates if projection is geographic (see:wfs_base.py _computeBbox)
                query_bbox = dict(self._flipBbox(self.bbox))
            else:
                query_bbox = self.bbox

            url += "&BBOX=%s,%s,%s,%s" % (
                query_bbox["minx"],
                query_bbox["miny"],
                query_bbox["maxx"],
                query_bbox["maxy"],
            )

        if self.o_maximum_features:
            url += "&MAXFEATURES=" + str(self.o_maximum_features)

        if self.o_urlparams != "":
            url += "&" + self.o_urlparams

        grass.debug(url)
        try:
            wfs_data = urlopen(url)
        except IOError:
            grass.fatal(_("Unable to fetch data from server"))

        temp_map = self._temp()

        # download data into temporary file; 'with' guarantees the file is
        # closed even on error (the previous code could raise a NameError in
        # its 'finally' clause when open() itself failed, and contained a
        # dead bare-expression statement)
        try:
            with open(temp_map, "w") as temp_map_opened:
                temp_map_opened.write(wfs_data.read())
        except IOError:
            grass.fatal(_("Unable to write data into tempfile"))

        namespaces = [
            "http://www.opengis.net/ows", "http://www.opengis.net/ogc"
        ]

        # inspect only the root element to detect a server exception report
        context = etree.iterparse(temp_map, events=["start"])
        # next() works on both Python 2 and 3 (was Py2-only context.next())
        event, root = next(context)

        for namesp in namespaces:
            if (root.tag == "{%s}ExceptionReport" % namesp
                    or root.tag == "{%s}ServiceExceptionReport" % namesp):
                try:
                    with open(temp_map, "r") as error_xml_opened:
                        err_str = error_xml_opened.read()
                except IOError:
                    grass.fatal(_("Unable to read data from tempfile"))

                if err_str is not None:
                    grass.fatal(_("WFS server error: %s") % err_str)
                else:
                    grass.fatal(_("WFS server unknown error"))

        return temp_map
Beispiel #51
0
def download_tile(tile, url, pid, srtmv3, one, username, password):
    """Download one SRTM tile archive into the current directory.

    SRTM V3 tiles are fetched from the NASA Earthdata server using HTTP
    basic authentication; SRTM V2.1 tiles are searched in the continent
    subdirectories of the given base url.

    @return 1 on success, 0 on failure
    """
    grass.debug("Download tile: %s" % tile, debug=1)
    if srtmv3:
        if one:
            local_tile = str(tile) + ".SRTMGL1.hgt.zip"
        else:
            local_tile = str(tile) + ".SRTMGL3.hgt.zip"
    else:
        local_tile = str(tile) + ".hgt.zip"

    urllib2.urlcleanup()

    if srtmv3:
        remote_tile = str(url) + local_tile
        goturl = 1

        try:
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(
                None, "https://urs.earthdata.nasa.gov", username, password
            )

            cookie_jar = CookieJar()

            opener = urllib2.build_opener(
                urllib2.HTTPBasicAuthHandler(password_manager),
                # urllib2.HTTPHandler(debuglevel=1),    # Uncomment these two lines to see
                # urllib2.HTTPSHandler(debuglevel=1),   # details of the requests/responses
                urllib2.HTTPCookieProcessor(cookie_jar),
            )
            urllib2.install_opener(opener)

            request = urllib2.Request(remote_tile)
            response = urllib2.urlopen(request)

            fo = open(local_tile, "w+b")
            fo.write(response.read())
            # BUGFIX: was 'fo.close' (missing parentheses, file never closed)
            fo.close()
            time.sleep(0.5)
        except Exception:
            # download failed (HTTP error, auth failure, network problem)
            goturl = 0

        return goturl

    # SRTM subdirs: Africa, Australia, Eurasia, Islands, North_America, South_America
    for srtmdir in (
        "Africa",
        "Australia",
        "Eurasia",
        "Islands",
        "North_America",
        "South_America",
    ):
        remote_tile = str(url) + str(srtmdir) + "/" + local_tile
        goturl = 1

        try:
            # BUGFIX: previously called urlopen(request) with 'request'
            # undefined in this code path, so every V2.1 download failed
            response = urllib2.urlopen(remote_tile)
            fo = open(local_tile, "w+b")
            fo.write(response.read())
            fo.close()
            time.sleep(0.5)
            # does not work:
            # urllib.urlretrieve(remote_tile, local_tile, data = None)
        except Exception:
            # tile is not in this continent subdir; try the next one
            goturl = 0

        if goturl == 1:
            return 1

    return 0
Beispiel #52
0
    def _computeRequestData(self, bbox, tl_corner, tile_span, tile_size, mat_num_bbox):
        """!Initialize data needed for iteration through tiles. Used by WMTS_GRASS and OnEarth_GRASS drivers.

        @param bbox requested region as dict with 'minx'/'miny'/'maxx'/'maxy'
        @param tl_corner top-left corner of the tile matrix ('minx', 'maxy')
        @param tile_span tile width ('x') and height ('y') in map units
        @param tile_size tile size in pixels ('x', 'y')
        @param mat_num_bbox row/column extent of tiles available on the server

        Sets self.t_num_bbox, self.intersects, self.map_region, self.i_col,
        self.i_row, self.query_bbox and self.tile_ref.
        """
        # guards against floating point error when a bbox edge falls exactly
        # on a tile boundary (min edges rounded in, max edges rounded out)
        epsilon = 1e-15

        # request data bbox specified in row and col number
        self.t_num_bbox = {}

        self.t_num_bbox['min_col'] = int(
            floor((bbox['minx'] - tl_corner['minx']) / tile_span['x'] + epsilon))
        self.t_num_bbox['max_col'] = int(
            floor((bbox['maxx'] - tl_corner['minx']) / tile_span['x'] - epsilon))

        # rows are counted downwards from the matrix top-left corner
        self.t_num_bbox['min_row'] = int(
            floor((tl_corner['maxy'] - bbox['maxy']) / tile_span['y'] + epsilon))
        self.t_num_bbox['max_row'] = int(
            floor((tl_corner['maxy'] - bbox['miny']) / tile_span['y'] - epsilon))

        # Does required bbox intersects bbox of data available on server?
        # NOTE(review): the lower bounds here compare against
        # self.t_num_bbox['min_row'/'min_col'] rather than
        # mat_num_bbox['min_row'/'min_col'] — looks like it only effectively
        # tests the upper bounds; confirm intended semantics before changing.
        self.intersects = False
        for col in ['min_col', 'max_col']:
            for row in ['min_row', 'max_row']:
                if (self.t_num_bbox['min_row'] <= self.t_num_bbox[row] and self.t_num_bbox[row] <= mat_num_bbox['max_row']) and (
                        self.t_num_bbox['min_col'] <= self.t_num_bbox[col] and self.t_num_bbox[col] <= mat_num_bbox['max_col']):
                    self.intersects = True

        if not self.intersects:
            grass.warning(_('Region is out of server data extend.'))
            self.map_region = None
            return

        # crop request bbox to server data bbox extend
        if self.t_num_bbox['min_col'] < (mat_num_bbox['min_col']):
            self.t_num_bbox['min_col'] = int(mat_num_bbox['min_col'])

        if self.t_num_bbox['max_col'] > (mat_num_bbox['max_col']):
            self.t_num_bbox['max_col'] = int(mat_num_bbox['max_col'])

        if self.t_num_bbox['min_row'] < (mat_num_bbox['min_row']):
            self.t_num_bbox['min_row'] = int(mat_num_bbox['min_row'])

        if self.t_num_bbox['max_row'] > (mat_num_bbox['max_row']):
            self.t_num_bbox['max_row'] = int(mat_num_bbox['max_row'])

        grass.debug(
            't_num_bbox: min_col:%d max_col:%d min_row:%d max_row:%d' %
            (self.t_num_bbox['min_col'],
             self.t_num_bbox['max_col'],
             self.t_num_bbox['min_row'],
             self.t_num_bbox['max_row']),
            3)

        num_tiles = (self.t_num_bbox['max_col'] - self.t_num_bbox['min_col'] + 1) * (
            self.t_num_bbox['max_row'] - self.t_num_bbox['min_row'] + 1)
        grass.message(
            _('Fetching %d tiles with %d x %d pixel size per tile...') %
            (num_tiles, tile_size['x'], tile_size['y']))

        # georeference of raster, where tiles will be merged
        self.map_region = {}
        self.map_region['minx'] = self.t_num_bbox['min_col'] * tile_span['x'] + tl_corner['minx']
        self.map_region['maxy'] = tl_corner['maxy'] - (self.t_num_bbox['min_row']) * tile_span['y']

        # max edges are the far side of the last included tile (+1)
        self.map_region['maxx'] = (
            self.t_num_bbox['max_col'] + 1) * tile_span['x'] + tl_corner['minx']
        self.map_region['miny'] = tl_corner[
            'maxy'] - (self.t_num_bbox['max_row'] + 1) * tile_span['y']

        # size of raster, where tiles will be merged
        self.map_region['cols'] = int(tile_size['x'] *
                                      (self.t_num_bbox['max_col'] - self.t_num_bbox['min_col'] + 1))
        self.map_region['rows'] = int(tile_size['y'] *
                                      (self.t_num_bbox['max_row'] - self.t_num_bbox['min_row'] + 1))

        # hold information about current column and row during iteration
        self.i_col = self.t_num_bbox['min_col']
        self.i_row = self.t_num_bbox['min_row']

        # bbox for first tile request (top-left tile of the matrix)
        self.query_bbox = {
            'minx': tl_corner['minx'],
            'maxy': tl_corner['maxy'],
            'maxx': tl_corner['minx'] + tile_span['x'],
            'miny': tl_corner['maxy'] - tile_span['y'],
        }

        self.tile_ref = {
            'sizeX': tile_size['x'],
            'sizeY': tile_size['y']
        }
Beispiel #53
0
 def _debug(self, fn, msg):
     """Emit a debug message tagged with the owning class and method name."""
     tag = "%s.%s: %s" % (self.__class__.__name__, fn, msg)
     grass.debug(tag)
Beispiel #54
0
def main():
    """Download, import and patch SRTM tiles covering the requested region.

    Reads the module ``options``/``flags``: fetches (or reads from a local
    folder) the 1-degree SRTM HGT tiles intersecting the current region (or
    an explicit ``region`` option), imports them, patches them into the
    raster map ``output`` and optionally fills NULL cells with
    r.resamp.bspline. When the current location is not latlong, tiles are
    imported into a temporary latlong location and reprojected back with
    r.proj.
    """
    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    global tile, tmpdir, in_temp, currdir, tmpregionname

    in_temp = False

    url = options["url"]
    username = options["username"]
    password = options["password"]
    local = options["local"]
    output = options["output"]
    memory = options["memory"]
    fillnulls = flags["n"]
    # flag "2" selects the older SRTM V2.1 source; default is SRTM V3
    srtmv3 = not flags["2"]
    one = flags["1"]
    dozerotile = flags["z"]
    reproj_res = options["resolution"]

    overwrite = grass.overwrite()

    res = "00:00:03"
    if srtmv3:
        fillnulls = 0
        if one:
            res = "00:00:01"
    else:
        # SRTM V2.1 is only distributed as 3 arc-second tiles
        one = None

    if len(local) == 0:
        if len(url) == 0:
            if srtmv3:
                if one:
                    url = "https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/"
                else:
                    url = "https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11/"
            else:
                url = "http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/"

    if len(local) == 0:
        local = None

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags="j")
    kv = grass.parse_key_val(s)

    # options["memory"] is a string: compare numerically (a bare
    # `memory <= 0` raises TypeError on Python 3)
    if fillnulls == 1 and int(memory) <= 0:
        grass.warning(
            _(
                "Amount of memory to use for interpolation must be positive, setting to 300 MB"
            )
        )
        memory = "300"

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    currdir = os.getcwd()
    pid = os.getpid()

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True
    if local is None:
        local = tmpdir

    # save region
    tmpregionname = "r_in_srtm_tmp_region"
    grass.run_command("g.region", save=tmpregionname, overwrite=overwrite)

    # get extents
    if kv["+proj"] == "longlat":
        reg = grass.region()
    else:
        if not options["resolution"]:
            grass.fatal(
                _("The <resolution> must be set if the projection is not 'longlat'.")
            )
        # region extents in latlong (g.region -l), taking the outer hull of
        # the four reprojected corners
        reg2 = grass.parse_command("g.region", flags="uplg")
        north = [float(reg2["ne_lat"]), float(reg2["nw_lat"])]
        south = [float(reg2["se_lat"]), float(reg2["sw_lat"])]
        east = [float(reg2["ne_long"]), float(reg2["se_long"])]
        west = [float(reg2["nw_long"]), float(reg2["sw_long"])]
        reg = {}
        if np.mean(north) > np.mean(south):
            reg["n"] = max(north)
            reg["s"] = min(south)
        else:
            reg["n"] = min(north)
            reg["s"] = max(south)
        if np.mean(west) > np.mean(east):
            reg["w"] = max(west)
            reg["e"] = min(east)
        else:
            reg["w"] = min(west)
            reg["e"] = max(east)
        # remember target location settings so we can switch back after the
        # import into the temporary latlong location
        grassenv = grass.gisenv()
        GISDBASE = grassenv["GISDBASE"]
        TGTGISRC = os.environ["GISRC"]

    if kv["+proj"] != "longlat":
        SRCGISRC, TMPLOC = createTMPlocation()
    if options["region"] is None or options["region"] == "":
        north = reg["n"]
        south = reg["s"]
        east = reg["e"]
        west = reg["w"]
    else:
        west, south, east, north = options["region"].split(",")
        west = float(west)
        south = float(south)
        east = float(east)
        north = float(north)

    # adjust extents to cover SRTM tiles: 1 degree bounds
    # (int() truncates towards zero, hence the asymmetric corrections)
    tmpint = int(north)
    if tmpint < north:
        north = tmpint + 1
    else:
        north = tmpint

    tmpint = int(south)
    if tmpint > south:
        south = tmpint - 1
    else:
        south = tmpint

    tmpint = int(east)
    if tmpint < east:
        east = tmpint + 1
    else:
        east = tmpint

    tmpint = int(west)
    if tmpint > west:
        west = tmpint - 1
    else:
        west = tmpint

    # degenerate extent: make sure at least one tile is requested
    if north == south:
        north += 1
    if east == west:
        east += 1

    rows = abs(north - south)
    cols = abs(east - west)
    ntiles = rows * cols
    grass.message(_("Importing %d SRTM tiles...") % ntiles, flag="i")
    counter = 1

    srtmtiles = ""
    valid_tiles = 0
    for ndeg in range(south, north):
        for edeg in range(west, east):
            grass.percent(counter, ntiles, 1)
            counter += 1
            # build the canonical SRTM tile name, e.g. N45E013 / S01W072
            if ndeg < 0:
                tile = "S"
            else:
                tile = "N"
            tile = tile + "%02d" % abs(ndeg)
            if edeg < 0:
                tile = tile + "W"
            else:
                tile = tile + "E"
            tile = tile + "%03d" % abs(edeg)
            grass.debug("Tile: %s" % tile, debug=1)

            if local != tmpdir:
                gotit = import_local_tile(tile, local, pid, srtmv3, one)
            else:
                gotit = download_tile(tile, url, pid, srtmv3, one, username, password)
                if gotit == 1:
                    gotit = import_local_tile(tile, tmpdir, pid, srtmv3, one)
            if gotit == 1:
                grass.verbose(_("Tile %s successfully imported") % tile)
                valid_tiles += 1
            elif dozerotile:
                # tile is missing (e.g. ocean): create a zero-filled tile;
                # bounds are shifted by half a cell so cell centers fall on
                # the SRTM grid (0.5" for 1-arcsec, 1.5" for 3-arcsec data)
                if one:
                    # north
                    if ndeg < -1:
                        tmpn = "%02d:59:59.5S" % (abs(ndeg) - 2)
                    else:
                        tmpn = "%02d:00:00.5N" % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = "%02d:00:00.5S" % abs(ndeg)
                    else:
                        tmps = "%02d:59:59.5N" % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = "%03d:59:59.5W" % (abs(edeg) - 2)
                    else:
                        tmpe = "%03d:00:00.5E" % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = "%03d:00:00.5W" % abs(edeg)
                    else:
                        tmpw = "%03d:59:59.5E" % (edeg - 1)
                else:
                    # north
                    if ndeg < -1:
                        tmpn = "%02d:59:58.5S" % (abs(ndeg) - 2)
                    else:
                        tmpn = "%02d:00:01.5N" % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = "%02d:00:01.5S" % abs(ndeg)
                    else:
                        tmps = "%02d:59:58.5N" % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = "%03d:59:58.5W" % (abs(edeg) - 2)
                    else:
                        tmpe = "%03d:00:01.5E" % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = "%03d:00:01.5W" % abs(edeg)
                    else:
                        tmpw = "%03d:59:58.5E" % (edeg - 1)

                grass.run_command("g.region", n=tmpn, s=tmps, e=tmpe, w=tmpw, res=res)
                grass.run_command(
                    "r.mapcalc",
                    expression="%s = 0" % (tile + ".r.in.srtm.tmp." + str(pid)),
                    quiet=True,
                )
                grass.run_command("g.region", region=tmpregionname)

    # g.list with sep = comma does not work ???
    pattern = "*.r.in.srtm.tmp.%d" % pid
    srtmtiles = grass.read_command(
        "g.list", type="raster", pattern=pattern, sep="newline", quiet=True
    )

    srtmtiles = srtmtiles.splitlines()
    srtmtiles = ",".join(srtmtiles)
    grass.debug("List of Tiles: %s" % srtmtiles, debug=1)

    if valid_tiles == 0:
        grass.run_command(
            "g.remove", type="raster", name=str(srtmtiles), flags="f", quiet=True
        )
        grass.warning(_("No tiles imported"))
        if local != tmpdir:
            grass.fatal(_("Please check if local folder <%s> is correct.") % local)
        else:
            grass.fatal(
                _(
                    "Please check internet connection, credentials, and if url <%s> is correct."
                )
                % url
            )

    grass.run_command("g.region", raster=str(srtmtiles))

    grass.message(_("Patching tiles..."))
    if fillnulls == 0:
        if valid_tiles > 1:
            # r.buildvrt is cheap but only valid when no reprojection of the
            # patched result is needed afterwards inside this location
            if kv["+proj"] != "longlat":
                grass.run_command("r.buildvrt", input=srtmtiles, output=output)
            else:
                grass.run_command("r.patch", input=srtmtiles, output=output)
        else:
            grass.run_command(
                "g.rename", raster="%s,%s" % (srtmtiles, output), quiet=True
            )
    else:
        ncells = grass.region()["cells"]
        # int() instead of the Python-2-only long()
        if int(ncells) > 1000000000:
            grass.message(
                _("%s cells to interpolate, this will take some time") % str(ncells),
                flag="i",
            )
        if kv["+proj"] != "longlat":
            grass.run_command("r.buildvrt", input=srtmtiles, output=output + ".holes")
        else:
            grass.run_command("r.patch", input=srtmtiles, output=output + ".holes")
        mapstats = grass.parse_command(
            "r.univar", map=output + ".holes", flags="g", quiet=True
        )
        if mapstats["null_cells"] == "0":
            # nothing to fill: the patched map already is the result
            grass.run_command(
                "g.rename", raster="%s,%s" % (output + ".holes", output), quiet=True
            )
        else:
            # interpolate only NULL cells (-n), then patch the original data
            # over the interpolated surface and round back to integer
            grass.run_command(
                "r.resamp.bspline",
                input=output + ".holes",
                output=output + ".interp",
                se="0.0025",
                sn="0.0025",
                method="linear",
                memory=memory,
                flags="n",
            )
            grass.run_command(
                "r.patch",
                input="%s,%s" % (output + ".holes", output + ".interp"),
                output=output + ".float",
                flags="z",
            )
            grass.run_command(
                "r.mapcalc", expression="%s = round(%s)" % (output, output + ".float")
            )
            grass.run_command(
                "g.remove",
                type="raster",
                name="%s,%s,%s"
                % (output + ".holes", output + ".interp", output + ".float"),
                flags="f",
                quiet=True,
            )

    # switch to target location
    if kv["+proj"] != "longlat":
        os.environ["GISRC"] = str(TGTGISRC)
        # r.proj
        grass.message(_("Reprojecting <%s>...") % output)
        kwargs = {
            "location": TMPLOC,
            "mapset": "PERMANENT",
            "input": output,
            "memory": memory,
            "resolution": reproj_res,
        }
        if options["method"]:
            kwargs["method"] = options["method"]
        try:
            grass.run_command("r.proj", **kwargs)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % output)
    else:
        if fillnulls != 0:
            grass.run_command(
                "g.remove", type="raster", pattern=pattern, flags="f", quiet=True
            )

    # nice color table
    grass.run_command("r.colors", map=output, color="srtm", quiet=True)

    # write metadata:
    tmphist = grass.tempfile()
    f = open(tmphist, "w+")
    f.write(os.environ["CMDLINE"])
    f.close()
    if srtmv3:
        source1 = "SRTM V3"
    else:
        source1 = "SRTM V2.1"
    grass.run_command(
        "r.support",
        map=output,
        loadhistory=tmphist,
        description="generated by r.in.srtm.region",
        source1=source1,
        source2=(local if local != tmpdir else url),
    )
    grass.try_remove(tmphist)

    grass.message(_("Done: generated map <%s>") % output)
Beispiel #55
0
    def _download(self):
        """!Downloads data from WMS server using own driver

        Iterates over all tiles computed by the request manager, fetches
        each one over HTTP, and merges them into one georeferenced GDAL
        raster.

        @return temp_map with downloaded data (None if no tile was fetched)
        """
        grass.message(_("Downloading data from WMS server..."))

        # make sure query parameters can be appended to the base URL
        if "?" in self.params["url"]:
            self.params["url"] += "&"
        else:
            self.params["url"] += "?"

        if not self.params['capfile']:
            self.cap_file = self._fetchCapabilities(self.params)
        else:
            self.cap_file = self.params['capfile']

        # initialize correct manager according to chosen OGC service
        # (params['driver'] is assumed to be validated upstream)
        if self.params['driver'] == 'WMTS_GRASS':
            req_mgr = WMTSRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.proj_srs,
                self.cap_file)
        elif self.params['driver'] == 'WMS_GRASS':
            req_mgr = WMSRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.tile_size,
                self.proj_srs)
        elif self.params['driver'] == 'OnEarth_GRASS':
            req_mgr = OnEarthRequestMgr(
                self.params,
                self.bbox,
                self.region,
                self.proj_srs,
                self.cap_file)

        # get information about size in pixels and bounding box of raster, where
        # all tiles will be joined
        map_region = req_mgr.GetMapRegion()

        init = True
        temp_map = None

        fetch_try = 0

        # iterate through all tiles and download them
        while True:

            if fetch_try == 0:
                # get url for request the tile and information for placing the tile into
                # raster with other tiles
                tile = req_mgr.GetNextTile()

            # if last tile has been already downloaded
            if not tile:
                break

            # url for request the tile
            query_url = tile[0]

            # the tile size and offset in pixels for placing it into raster where tiles are joined
            tile_ref = tile[1]
            grass.debug(query_url, 2)
            try:
                wms_data = self._fetchDataFromServer(
                    query_url, self.params['username'],
                    self.params['password'])
            except (IOError, HTTPException) as e:
                if isinstance(e, HTTPError) and e.code == 401:
                    grass.fatal(
                        _("Authorization failed to '%s' when fetching data.\n%s") %
                        (self.params['url'], str(e)))
                else:
                    grass.fatal(
                        _("Unable to fetch data from: '%s'\n%s") %
                        (self.params['url'], str(e)))

            temp_tile = self._tempfile()

            # download data into temporary file
            # ('with' guarantees the file is closed even if open() or
            # write() fails, unlike a close() in a finally block that could
            # reference an unbound name)
            try:
                with open(temp_tile, 'wb') as temp_tile_opened:
                    temp_tile_opened.write(wms_data.read())
            except IOError as e:
                # some servers are not happy with many subsequent requests for tiles done immediately,
                # if immediate request was unsuccessful, try to repeat the request after 5s and 30s breaks
                # TODO probably servers can return more kinds of errors related to this
                # problem (not only 104 - connection reset by peer)
                if isinstance(e, socket.error) and \
                        getattr(e, 'errno', None) == 104 and fetch_try < 2:
                    fetch_try += 1

                    if fetch_try == 1:
                        sleep_time = 5
                    elif fetch_try == 2:
                        sleep_time = 30

                    grass.warning(
                        _("Server refused to send data for a tile.\nRequest will be repeated after %d s.") %
                        sleep_time)

                    sleep(sleep_time)
                    continue
                else:
                    grass.fatal(_("Unable to write data into tempfile.\n%s") % str(e))

            fetch_try = 0

            tile_dataset_info = gdal.Open(temp_tile, gdal.GA_ReadOnly)
            if tile_dataset_info is None:
                # print error xml returned from server
                try:
                    with open(temp_tile, 'rb') as error_xml_opened:
                        err_str = error_xml_opened.read()
                except IOError as e:
                    grass.fatal(_("Unable to read data from tempfile.\n%s") % str(e))

                if err_str is not None:
                    grass.fatal(_("WMS server error: %s") % err_str)
                else:
                    grass.fatal(_("WMS server unknown error"))

            temp_tile_pct2rgb = None
            if tile_dataset_info.RasterCount == 1 and \
               tile_dataset_info.GetRasterBand(1).GetRasterColorTable() is not None:
                # expansion of color table into bands
                temp_tile_pct2rgb = self._tempfile()
                tile_dataset = self._pct2rgb(temp_tile, temp_tile_pct2rgb)
            else:
                tile_dataset = tile_dataset_info

            # initialization of temp_map_dataset, where all tiles are merged
            if init:
                temp_map = self._tempfile()

                driver = gdal.GetDriverByName(self.gdal_drv_format)
                metadata = driver.GetMetadata()
                if gdal.DCAP_CREATE not in metadata or \
                        metadata[gdal.DCAP_CREATE] == 'NO':
                    # report the actual driver format (the previous code
                    # referenced an undefined name here)
                    grass.fatal(
                        _('Driver %s does not supports Create() method') %
                        self.gdal_drv_format)
                self.temp_map_bands_num = tile_dataset.RasterCount
                temp_map_dataset = driver.Create(temp_map, map_region['cols'], map_region['rows'],
                                                 self.temp_map_bands_num,
                                                 tile_dataset.GetRasterBand(1).DataType)
                init = False

            # tile is written into temp_map
            tile_to_temp_map = tile_dataset.ReadRaster(0, 0, tile_ref['sizeX'], tile_ref['sizeY'],
                                                       tile_ref['sizeX'], tile_ref['sizeY'])

            temp_map_dataset.WriteRaster(tile_ref['t_cols_offset'], tile_ref['t_rows_offset'],
                                         tile_ref['sizeX'], tile_ref['sizeY'], tile_to_temp_map)

            tile_dataset = None
            tile_dataset_info = None
            grass.try_remove(temp_tile)
            grass.try_remove(temp_tile_pct2rgb)

        if not temp_map:
            return temp_map
        # georeferencing and setting projection of temp_map
        projection = grass.read_command('g.proj',
                                        flags='wf',
                                        epsg=self.params['srs']).rstrip('\n')
        temp_map_dataset.SetProjection(projection)

        # negative y pixel size: row 0 is the northern edge
        pixel_x_length = (map_region['maxx'] - map_region['minx']) / int(map_region['cols'])
        pixel_y_length = (map_region['miny'] - map_region['maxy']) / int(map_region['rows'])

        geo_transform = [
            map_region['minx'],
            pixel_x_length,
            0.0,
            map_region['maxy'],
            0.0,
            pixel_y_length]
        temp_map_dataset.SetGeoTransform(geo_transform)
        temp_map_dataset = None

        return temp_map
Beispiel #56
0
def main():
    """Reconnect all vector maps in the current mapset to a new database.

    Reads the old/new connection parameters from the module ``options``,
    optionally creates the new database and copies tables (-c), optionally
    drops the original tables (-d), and rewires each qualifying layer with
    v.db.connect.
    """
    # old connection
    old_database = options['old_database']
    old_schema = options['old_schema']

    # new connection: fall back to the mapset defaults for unset options
    conn_defaults = gscript.db_connection()
    new_driver = options['new_driver'] or conn_defaults['driver']
    new_database = options['new_database'] or conn_defaults['database']
    new_schema = options['new_schema'] or conn_defaults['schema']

    if old_database == '':
        old_database = None
    old_database_subst = None
    if old_database is not None:
        old_database_subst = substitute_db(old_database)

    new_database_subst = substitute_db(new_database)

    if old_database_subst == new_database_subst and old_schema == new_schema:
        gscript.fatal(
            _("Old and new database connection is identical. "
              "Nothing to do."))

    mapset = gscript.gisenv()['MAPSET']

    vectors = gscript.list_grouped('vect')[mapset]
    num_vectors = len(vectors)

    if flags['c']:
        # create new database if not existing
        create_db(new_driver, new_database)

    for i, vect in enumerate(vectors, start=1):
        vect = "%s@%s" % (vect, mapset)
        gscript.message(
            _("%s\nReconnecting vector map <%s> "
              "(%d of %d)...\n%s") %
            ('-' * 80, vect, i, num_vectors, '-' * 80))
        for conn in gscript.vector_db(vect, stderr=nuldev).values():
            layer = conn['layer']
            schema_table = conn['table']
            key = conn['key']
            database = conn['database']
            driver = conn['driver']

            # split schema.table
            if '.' in schema_table:
                schema, table = schema_table.split('.', 1)
            else:
                schema, table = '', schema_table

            new_schema_table = "%s.%s" % (new_schema, table) if new_schema \
                else table

            gscript.debug(
                "DATABASE = '%s' SCHEMA = '%s' TABLE = '%s' ->\n"
                "      NEW_DATABASE = '%s' NEW_SCHEMA_TABLE = '%s'" %
                (old_database, schema, table, new_database, new_schema_table))

            # reconnect only layers that match the old connection and are
            # not already pointing at the new database
            do_reconnect = not (
                (old_database_subst is not None
                 and database != old_database_subst)
                or database == new_database_subst
                or schema != old_schema)

            if do_reconnect:
                gscript.verbose(_("Reconnecting layer %d...") % layer)

                if flags['c']:
                    # check if table exists in new database
                    copy_tab(driver, database, schema_table, new_driver,
                             new_database, new_schema_table)

                # drop original table if required
                if flags['d']:
                    drop_tab(vect, layer, schema_table, driver,
                             substitute_db(database))

                # reconnect tables (don't use substituted new_database)
                # NOTE: v.db.connect creates an index on the key column
                try:
                    gscript.run_command('v.db.connect',
                                        flags='o',
                                        quiet=True,
                                        map=vect,
                                        layer=layer,
                                        driver=new_driver,
                                        database=new_database,
                                        table=new_schema_table,
                                        key=key)
                except CalledModuleError:
                    gscript.warning(
                        _("Unable to connect table <%s> to vector "
                          "<%s> on layer <%s>") % (table, vect, str(layer)))

            else:
                if database != new_database_subst:
                    gscript.warning(
                        _("Layer <%d> will not be reconnected "
                          "because database or schema do not "
                          "match.") % layer)
    return 0
def main():
    """Reconnect all vector maps in the current mapset to a new database.

    Reads the old/new connection parameters from the module ``options``,
    optionally creates the new database and copies tables (-c), optionally
    drops the original tables (-d), and rewires each qualifying layer with
    v.db.connect.

    Fixes over the previous revision: consistent 4-space indentation (the
    old mix of tabs and spaces is a TabError under Python 3) and
    ``dict.values()`` instead of the Python-2-only ``itervalues()``.
    """
    # old connection
    old_database = options['old_database']
    old_schema = options['old_schema']
    # new connection: fall back to the mapset defaults for unset options
    default_connection = grass.db_connection()
    if options['new_driver']:
        new_driver = options['new_driver']
    else:
        new_driver = default_connection['driver']
    if options['new_database']:
        new_database = options['new_database']
    else:
        new_database = default_connection['database']
    if options['new_schema']:
        new_schema = options['new_schema']
    else:
        new_schema = default_connection['schema']

    if old_database == '':
        old_database = None
    old_database_subst = None
    if old_database is not None:
        old_database_subst = substitute_db(old_database)

    new_database_subst = substitute_db(new_database)

    if old_database_subst == new_database_subst and old_schema == new_schema:
        grass.fatal(_("Old and new database connection is identical. Nothing to do."))

    mapset = grass.gisenv()['MAPSET']

    vectors = grass.list_grouped('vect')[mapset]
    num_vectors = len(vectors)

    if flags['c']:
        # create new database if not existing
        create_db(new_driver, new_database)

    i = 0
    for vect in vectors:
        vect = "%s@%s" % (vect, mapset)
        i += 1
        grass.message(_("%s\nReconnecting vector map <%s> (%d of %d)...\n%s") %
                      ('-' * 80, vect, i, num_vectors, '-' * 80))
        for f in grass.vector_db(vect, stderr=nuldev).values():
            layer = f['layer']
            schema_table = f['table']
            key = f['key']
            database = f['database']
            driver = f['driver']

            # split schema.table
            if '.' in schema_table:
                schema, table = schema_table.split('.', 1)
            else:
                schema = ''
                table = schema_table

            if new_schema:
                new_schema_table = "%s.%s" % (new_schema, table)
            else:
                new_schema_table = table

            grass.debug("DATABASE = '%s' SCHEMA = '%s' TABLE = '%s' ->\n"
                        "      NEW_DATABASE = '%s' NEW_SCHEMA_TABLE = '%s'" %
                        (old_database, schema, table, new_database, new_schema_table))

            # reconnect only layers that match the old connection and are
            # not already pointing at the new database
            do_reconnect = True
            if old_database_subst is not None:
                if database != old_database_subst:
                    do_reconnect = False
            if database == new_database_subst:
                do_reconnect = False
            if schema != old_schema:
                do_reconnect = False

            if do_reconnect:
                grass.verbose(_("Reconnecting layer %d...") % layer)

                if flags['c']:
                    # check if table exists in new database
                    copy_tab(driver, database, schema_table,
                             new_driver, new_database, new_schema_table)

                # drop original table if required
                if flags['d']:
                    drop_tab(vect, layer, schema_table, driver, substitute_db(database))

                # reconnect tables (don't use substituted new_database)
                # NOTE: v.db.connect creates an index on the key column
                try:
                    grass.run_command('v.db.connect', flags='o', quiet=True, map=vect,
                                      layer=layer, driver=new_driver, database=new_database,
                                      table=new_schema_table, key=key)
                except CalledModuleError:
                    grass.warning(_("Unable to connect table <%s> to vector <%s> on layer <%s>") %
                                  (table, vect, str(layer)))

            else:
                if database != new_database_subst:
                    grass.warning(_("Layer <%d> will not be reconnected because "
                                    "database or schema do not match.") % layer)

    return 0
Beispiel #58
0
def main():
    """Inverse Cost Weighted (ICW) interpolation of vector point data.

    For every valid input point a cost surface is grown with r.cost over
    the flattened cost_map area; each site's attribute value is spread as
    value * (1/cost^friction) / sum(1/cost^friction), and the per-site
    partial maps are summed with r.series into the output raster.

    Reads the module's ``options``/``flags`` globals set by the GRASS
    parser.  Temporary maps are removed by cleanup() at the end.
    """

    pts_input = options["input"]
    output = options["output"]
    cost_map = options["cost_map"]
    post_mask = options["post_mask"]
    column = options["column"]
    friction = float(options["friction"])
    layer = options["layer"]
    where = options["where"]
    workers = int(options["workers"])

    # allow the environment to raise the worker count if not set explicitly
    if workers == 1 and "WORKERS" in os.environ:
        workers = int(os.environ["WORKERS"])
    if workers < 1:
        workers = 1

    pid = str(os.getpid())
    tmp_base = "tmp_icw_" + pid + "_"

    # do the maps exist?
    if not grass.find_file(pts_input, element="vector")["file"]:
        grass.fatal(_("Vector map <%s> not found") % pts_input)
    if post_mask:
        if grass.find_file("MASK")["file"]:
            grass.fatal(
                _("A MASK already exists; remove it before using the post_mask option."
                  ))
        if not grass.find_file(post_mask)["file"]:
            grass.fatal(_("Raster map <%s> not found") % post_mask)

    grass.verbose(_("v.surf.icw -- Inverse Cost Weighted Interpolation"))
    grass.verbose(
        _("Processing %s -> %s, column=%s, Cf=%g") %
        (pts_input, output, column, friction))

    if flags["r"]:
        grass.verbose(_("Using (d^n)*log(d) radial basis function."))

    grass.verbose(
        "------------------------------------------------------------------------"
    )

    # adjust so that tiny numbers don't hog all the FP precision space
    #  if friction = 4: divisor ~ 10.0
    #  if friction = 5: divisor ~ 100.0
    #  if friction = 6: divisor ~ 500.0
    if friction >= 4:
        divisor = 0.01 * pow(friction, 6)
    else:
        divisor = 1

    # Check that we have the column and it is the correct type
    try:
        coltype = grass.vector_columns(pts_input, layer)[column]
    except KeyError:
        grass.fatal(
            _("Data column <%s> not found in vector points map <%s>") %
            (column, pts_input))

    if coltype["type"] not in ("INTEGER", "DOUBLE PRECISION"):
        # fixed message typo ("numberic")
        grass.fatal(_("Data column must be numeric"))

    # cleanse cost area mask to a flat =1 for my porpoises
    area_mask = tmp_base + "area"
    grass.mapcalc(
        "$result = if($cost_map, 1, null())",
        result=area_mask,
        cost_map=cost_map,
        quiet=True,
    )

    ## done with prep work,
    ########################################################################
    ## Commence crunching ..

    # crop out only points in region
    addl_opts = {}
    if where:
        addl_opts["where"] = "%s" % where

    points_list = grass.read_command("v.out.ascii",
                                     input=pts_input,
                                     output="-",
                                     flags="r",
                                     **addl_opts).splitlines()

    # Needed to strip away empty entries from MS Windows newlines
    #   list() is needed for Python 3 compatibility
    points_list = list([_f for _f in points_list if _f])

    # convert into a 2D list, drop unneeded cat column
    # to drop cat col, add this to the end of the line [:-1]
    # fixme: how does this all react for 3D starting points?
    for i in range(len(points_list)):
        points_list[i] = points_list[i].split("|")

    # count number of starting points (n). This value will later be decremented
    #  if points are found to be off the cost map or out of region.
    n = len(points_list)

    if n > 200:
        grass.warning(
            _("Computation is expensive! Please consider "
              "fewer points or get ready to wait a while ..."))
        import time

        time.sleep(5)

    #### generate cost maps for each site in range
    grass.message(_("Generating cost maps ..."))

    # avoid do-it-yourself brain surgery
    points_list_orig = list(points_list)

    proc = {}
    num = 1
    for i in range(n):
        position = points_list_orig[i]
        easting = position[0]
        northing = position[1]
        cat = int(position[-1])

        # retrieve data value from vector's attribute table:
        data_value = grass.vector_db_select(pts_input,
                                            columns=column)["values"][cat][0]

        if not data_value:
            grass.message(
                _("Site %d of %d,  e=%.4f  n=%.4f  cat=%d  data=?") %
                (num, n, float(easting), float(northing), cat))
            grass.message(_(" -- Skipping, no data here."))
            del points_list[num - 1]
            n -= 1
            continue
        else:
            grass.message(
                _("Site %d of %d,  e=%.4f  n=%.4f  cat=%d  data=%.8g") %
                (num, n, float(easting), float(northing), cat,
                 float(data_value)))

        # we know the point is in the region, but is it in a non-null area of the cost surface?
        rast_val = (grass.read_command(
            "r.what",
            map=area_mask,
            coordinates="%s,%s" % (position[0], position[1]),
        ).strip().split("|")[-1])
        if rast_val == "*":
            grass.message(_(" -- Skipping, point lays outside of cost_map."))
            del points_list[num - 1]
            n -= 1
            continue

        # it's ok to proceed
        try:
            data_value = float(data_value)
        except ValueError:
            grass.fatal("Data value [%s] is non-numeric" % data_value)

        cost_site_name = tmp_base + "cost_site." + "%05d" % num
        proc[num - 1] = grass.start_command(
            "r.cost",
            flags="k",
            input=area_mask,
            output=cost_site_name,
            start_coordinates=easting + "," + northing,
            quiet=True,
        )
        # stall to wait for the nth worker to complete,
        if num % workers == 0:
            proc[num - 1].wait()

        num += 1

    # make sure everyone is finished
    for i in range(n):
        if proc[i].wait() != 0:
            grass.fatal(_("Problem running %s") % "r.cost")

    grass.message(_("Removing anomalies at site positions ..."))

    proc = {}
    for i in range(n):
        cost_site_name = tmp_base + "cost_site." + "%05d" % (i + 1)
        # max_cost="$GIS_OPT_MAX_COST"  : commented out until r.null cleansing/continue code is sorted out
        # start_points=tmp_idw_cost_site_$$

        # we do this so the divisor exists and the weighting is huge at the exact sample spots
        # more efficient to reclass to 1?
        proc[i] = grass.mapcalc_start(
            "$cost_n_cleansed = if($cost_n == 0, 0.1, $cost_n)",
            cost_n_cleansed=cost_site_name + ".cleansed",
            cost_n=cost_site_name,
            quiet=True,
        )
        # stall to wait for the nth worker to complete,
        if (i + 1) % workers == 0:
            # print 'stalling ...'
            proc[i].wait()

    # make sure everyone is finished
    for i in range(n):
        if proc[i].wait() != 0:
            grass.fatal(_("Problem running %s") % "r.mapcalc")

    grass.message(_("Applying radial decay ..."))

    proc = {}
    for i in range(n):
        cost_site_name = tmp_base + "cost_site." + "%05d" % (i + 1)
        grass.run_command("g.remove",
                          flags="f",
                          type="raster",
                          name=cost_site_name,
                          quiet=True)
        grass.run_command(
            "g.rename",
            raster=cost_site_name + ".cleansed" + "," + cost_site_name,
            quiet=True,
        )

        # r.to.vect then r.patch output
        # v.to.rast in=tmp_idw_cost_site_29978 out=tmp_idw_cost_val_$$ use=val val=10

        if not flags["r"]:
            #  exp(3,2) is 3^2  etc.  as is pow(3,2)
            # r.mapcalc "1by_cost_site_sqrd.$NUM =  1.0 / exp(cost_site.$NUM , $FRICTION)"
            #      EXPRESSION="1.0 / pow(cost_site.$NUM $DIVISOR, $FRICTION )"
            expr = "1.0 / pow($cost_n / " + str(divisor) + ", $friction)"
        else:
            # use log10() or ln() ?
            #      EXPRESSION="1.0 / ( pow(cost_site.$NUM, $FRICTION) * log (cost_site.$NUM) )"
            # (stray trailing quote removed -- it broke the mapcalc expression)
            expr = "1.0 / ( pow($cost_n, $friction) * log($cost_n) )"

        grass.debug("r.mapcalc expression is: [%s]" % expr)

        one_by_cost_site_sq_n = tmp_base + "1by_cost_site_sq." + "%05d" % (i +
                                                                           1)

        proc[i] = grass.mapcalc_start(
            "$result = " + expr,
            result=one_by_cost_site_sq_n,
            cost_n=cost_site_name,
            friction=friction,
            quiet=True,
        )
        # stall to wait for the nth worker to complete,
        if (i + 1) % workers == 0:
            # print 'stalling ...'
            proc[i].wait()

        # r.patch in=1by_cost_site_sqrd.${NUM},tmp_idw_cost_val_$$ out=1by_cost_site_sqrd.${NUM} --o
        # g.remove type=rast name=cost_site.$NUM -f

    # make sure everyone is finished
    for i in range(n):
        if proc[i].wait() != 0:
            grass.fatal(_("Problem running %s") % "r.mapcalc")

    grass.run_command(
        "g.remove",
        flags="f",
        type="raster",
        pattern=tmp_base + "cost_site.*",
        quiet=True,
    )
    # grass.run_command('g.list', type = 'raster', mapset = '.')

    #######################################################
    #### Step 3) find sum(cost^2)
    grass.verbose("")
    grass.verbose(_("Finding sum of squares ..."))

    # todo: test if MASK exists already, fatal exit if it does?
    if post_mask:
        # format the map name into the message (was wrongly passed as a
        # second positional argument to grass.message)
        grass.message(_("Setting post_mask <%s>") % post_mask)
        grass.mapcalc("MASK = $maskmap", maskmap=post_mask, overwrite=True)

    grass.message(_("Summation of cost weights ..."))

    input_maps = tmp_base + "1by_cost_site_sq.%05d" % 1

    global TMP_FILE
    TMP_FILE = grass.tempfile()
    with open(TMP_FILE, "w") as maplist:
        # the first site's map must be in the list too -- previously only
        # maps 2..n were written and site #1 was dropped from the sum
        maplist.write(input_maps + "\n")
        for i in range(2, n + 1):
            mapname = "%s1by_cost_site_sq.%05d" % (tmp_base, i)
            maplist.write(mapname + "\n")

    # grass.run_command('g.list', type = 'raster', mapset = '.')

    sum_of_1by_cost_sqs = tmp_base + "sum_of_1by_cost_sqs"
    try:
        grass.run_command("r.series",
                          method="sum",
                          file=TMP_FILE,
                          output=sum_of_1by_cost_sqs)
    except CalledModuleError:
        grass.fatal(_("Problem running %s") % "r.series")

    if post_mask:
        # same positional-argument fix as above
        grass.message(_("Removing post_mask <%s>") % post_mask)
        grass.run_command("g.remove", flags="f", name="MASK", quiet=True)

    #######################################################
    #### Step 4) ( 1/di^2 / sum(1/d^2) ) *  ai
    grass.verbose("")
    grass.message(_("Creating partial weights ..."))

    proc = {}
    num = 1
    for position in points_list:
        easting = position[0]
        northing = position[1]
        cat = int(position[-1])
        data_value = grass.vector_db_select(pts_input,
                                            columns=column)["values"][cat][0]
        data_value = float(data_value)

        # failsafe: at this point the data values should all be valid
        if not data_value:
            grass.message(
                _("Site %d of %d,  cat = %d, data value = ?") % (num, n, cat))
            grass.message(
                _(" -- Skipping, no data here. [Probably programmer error]"))
            n -= 1
            continue
        else:
            grass.message(
                _("Site %d of %d,  cat = %d, data value = %.8g") %
                (num, n, cat, data_value))

        # we know the point is in the region, but is it in a non-null area of the cost surface?
        rast_val = (grass.read_command(
            "r.what",
            map=area_mask,
            coordinates="%s,%s" % (position[0], position[1]),
        ).strip().split("|")[-1])
        if rast_val == "*":
            grass.message(
                _(" -- Skipping, point lays outside of cost_map. [Probably programmer error]"
                  ))
            n -= 1
            continue

        partial_n = tmp_base + "partial." + "%05d" % num
        one_by_cost_site_sq = tmp_base + "1by_cost_site_sq." + "%05d" % num

        # "( $DATA_VALUE / $N ) * (1.0 - ( cost_sq_site.$NUM / sum_of_cost_sqs ))"
        # "( cost_sq_site.$NUM / sum_of_cost_sqs ) * ( $DATA_VALUE / $N )"

        proc[num - 1] = grass.mapcalc_start(
            "$partial_n = ($data * $one_by_cost_sq) / $sum_of_1by_cost_sqs",
            partial_n=partial_n,
            data=data_value,
            one_by_cost_sq=one_by_cost_site_sq,
            sum_of_1by_cost_sqs=sum_of_1by_cost_sqs,
            quiet=True,
        )

        # stall to wait for the nth worker to complete,
        if num % workers == 0:
            proc[num - 1].wait()

        # free up disk space ASAP
        # grass.run_command('g.remove', flags = 'f', type = 'raster', name = one_by_cost_site_sq, quiet = True)

        num += 1
        if num > n:
            break

    # make sure everyone is finished
    for i in range(n):
        proc[i].wait()

    # free up disk space ASAP
    grass.run_command(
        "g.remove",
        flags="f",
        type="raster",
        pattern=tmp_base + "1by_cost_site_sq.*",
        quiet=True,
    )
    # grass.run_command('g.list', type = 'raster', mapset = '.')

    #######################################################
    grass.message("")
    grass.message(_("Calculating final values ..."))

    input_maps = tmp_base + "partial.%05d" % 1
    for i in range(2, n + 1):
        input_maps += ",%spartial.%05d" % (tmp_base, i)

    try:
        grass.run_command("r.series",
                          method="sum",
                          input=input_maps,
                          output=output)
    except CalledModuleError:
        grass.fatal(_("Problem running %s") % "r.series")

    # TODO: r.patch in v.to.rast of values at exact seed site locations. currently set to null

    grass.run_command("r.colors", map=output, color="bcyr", quiet=True)
    grass.run_command("r.support",
                      map=output,
                      history="",
                      title="Inverse cost-weighted interpolation")
    grass.run_command("r.support",
                      map=output,
                      history="v.surf.icw interpolation:")
    grass.run_command(
        "r.support",
        map=output,
        history="  input map=" + pts_input + "   attribute column=" + column,
    )
    grass.run_command(
        "r.support",
        map=output,
        history="  cost map=" + cost_map + "   coefficient of friction=" +
        str(friction),
    )
    if flags["r"]:
        grass.run_command("r.support",
                          map=output,
                          history="  (d^n)*log(d) as radial basis function")
    if post_mask:
        grass.run_command("r.support",
                          map=output,
                          history="  post-processing mask=" + post_mask)
    if where:
        grass.run_command("r.support",
                          map=output,
                          history="  SQL query= WHERE " + where)

    # save layer #? to metadata?   command line hist?

    #######################################################
    # Step 5) rm cost and cost_sq maps, tmp_icw_points, etc
    cleanup()

    #######################################################
    # Step 6) done!
    # format outside _() so the translatable string stays a constant
    grass.message(_("Done! Results written to <%s>.") % output)