Example #1
0
    def mkContours(self):
        """Make a contour map of the given DEM, in the given catchment.

        If self.contours is an int it is used as a "nice" break interval
        derived from the DEM statistics; otherwise it is taken as an
        explicit list of breaks. Aborts if fewer than 2 breaks result.
        """
        # get stats of DEM up front: needed both for nice breaks and for
        # the error message below (the original only fetched stats in the
        # int branch, so the fatal message raised NameError for list input)
        stats = grass.parse_command('r.univar', map=self.elevation, flags='g')

        # check what breaks to take, if int find nice breaks, if list take as breaks
        if type(self.contours) is int:
            interval = self.contours
            # make nice breaks: round min up / max down to the interval
            minelev = int(float(stats['min']) / interval + 1) * interval
            maxelev = int(float(stats['max']) / interval) * interval
            breaks = range(minelev, maxelev + 1, interval)
        else:
            breaks = self.contours
        if len(breaks) < 2:
            grass.fatal('Need at least 2 contour breaks: %s \nRange of elevation: %s - %s' % (breaks, stats['min'], stats['max']))
        # fixed: message() was handed a tuple; format a single string instead
        grass.message('Contour breaks: %s' % str(breaks))
        # form mapcalc expression and execute
        exp = self.contourrast + '= if(%s<%s,1)' % (self.elevation, breaks[0])  # for the first, smaller than
        for b in breaks:
            exp += '+ if(%s>=%s,1)' % (self.elevation, b)  # for all greater eq than
        grass.mapcalc(exp, overwrite=True)

        grass.message('Calculated contourmap: %s' % self.contourrast)

        return
Example #2
0
def main():
    """Validate the input rasters and start the Map Swipe GUI."""
    driver = UserSettings.Get(group='display', key='driver', subkey='type')
    # choose the render backend matching the configured display driver
    os.environ['GRASS_RENDER_IMMEDIATE'] = 'png' if driver == 'png' else 'cairo'

    first = options['first']
    second = options['second']
    mode = options['mode']

    # both rasters are optional, but any given one must exist
    for raster in (first, second):
        if not raster:
            continue
        found = grass.find_file(name=raster)
        if not found['name']:
            grass.fatal(_("Raster map <%s> not found") % raster)

    app = wx.App()
    if not CheckWxVersion([2, 9]):
        wx.InitAllImageHandlers()

    frame = SwipeMapFrame(parent=None, giface=StandaloneGrassInterface())

    if first:
        frame.SetFirstRaster(first)
    if second:
        frame.SetSecondRaster(second)
    if first or second:
        frame.SetRasterNames()

    frame.SetViewMode(mode)
    frame.Show()
    app.MainLoop()
Example #3
0
def main():
    """Split an RGB raster into separate red/green/blue component maps."""
    options, unused = gscript.parser()
    input = options['input']
    red = options['red']
    green = options['green']
    blue = options['blue']

    if not gscript.find_file(input)['file']:
        gscript.fatal(_("Raster map <%s> not found") % input)

    # build one mapcalc sub-expression per requested output band
    requested = (('r', red), ('g', green), ('b', blue))
    expressions = ['%s = %s#${input}' % (name, letter)
                   for letter, name in requested if name]
    maps = [name for _letter, name in requested if name]
    gscript.mapcalc(';'.join(expressions), input=input)

    # grey color table + history entry for every produced map
    for name in maps:
        gscript.run_command('r.colors', map=name, color='grey255')
        gscript.raster_history(name)
def get_table_name(map_name, layer):
    """Return the attribute table name connected to *layer* of vector *map_name*.

    Calls grass.fatal() (which aborts) when the layer has no table or the
    lookup fails.
    """
    try:
        table_name = grass.vector_layer_db(map_name, layer)['name']
    except Exception:
        # fixed: bare "except:" also caught SystemExit/KeyboardInterrupt
        grass.fatal("Map <%s> does not have layer number %s" % (map_name, layer))
    return table_name
Example #5
0
def main():
    """
    Compute cell areas
    """
    
    projinfo = grass.parse_command('g.proj', flags='g')
    
    options, flags = grass.parser()
    output = options['output']
    units = options['units']
    
    # First check if output exists
    if len(grass.parse_command('g.list', type='rast', 
                               pattern=options['output'])):
        if not grass.overwrite():
            grass.fatal("Raster map '" + options['output'] + 
                        "' already exists. Use '--o' to overwrite.")

    # Then compute
    if projinfo['units'] == 'meters':
        if units == 'm2':
            grass.mapcalc(output+' = nsres() * ewres()')
        elif units == 'km2':
            grass.mapcalc(output+' = nsres() * ewres() / 10.^6')
    elif projinfo['units'] == 'degrees':
        if units == 'm2':
            grass.mapcalc(output+' = ( 111195. * nsres() ) * \
                          ( ewres() * '+str(np.pi/180.)+' * 6371000. * cos(y()) )')
        elif units == 'km2':
            grass.mapcalc(output+' = ( 111.195 * nsres() ) * \
                          ( ewres() * '+str(np.pi/180.)+' * 6371. * cos(y()) )')
    else:
        print 'Units: ', + projinfo['units'] + ' not currently supported'
def read_sdf(input):
    """
    Read the input SDF, scanning for lines with "CUT LINE".
    Read the coords from the next three lines, and save to a list.
    Read the bank positions from the following line and save to another list.
    Return both lists (cutline points, bank distances).
    """
    # the with-block closes the file; the original also called close()
    # redundantly inside the block
    with open(input, 'r') as sdf:
        lines = sdf.readlines()

    cutline_pts = []
    bank_dist = []
    for i, line in enumerate(lines):
        if 'CUT LINE' not in line:
            continue
        # the three lines after "CUT LINE" each hold one "x, y" pair
        pts = []
        for offset in (1, 2, 3):
            coords = lines[i + offset].strip().replace(" ", "").split(",")
            pts.append([coords[0], coords[1]])
        cutline_pts.append(pts)
        # the bank positions line follows the three coordinate lines
        if 'BANK POSITIONS' in lines[i + 4]:
            coords = lines[i + 4].strip().split(':')[1]
            bank_dist.append(coords.split(','))
        else:
            grass.fatal("No BANK POSITIONS data in sdf file")
            return None

    return cutline_pts, bank_dist
def create_db(driver, database):
    """Ensure the target database for *driver* exists, creating it if needed.

    Returns True when a new dbf directory was created, False otherwise
    (including when db.createdb was run for a non-dbf driver).
    """
    subst_database = substitute_db(database)
    if driver == 'dbf':
        path = subst_database
        # check if destination directory exists
        if not os.path.isdir(path):
            # create dbf database (fixed: these lines mixed tabs and
            # spaces, which is a TabError under Python 3)
            os.makedirs(path)
            return True
        return False

    if driver == 'sqlite':
        path = os.path.dirname(subst_database)
        # check if destination directory exists
        if not os.path.isdir(path):
            os.makedirs(path)

    # nothing to do if the database is already known to the driver
    if subst_database in grass.read_command('db.databases', quiet=True,
                                            driver=driver).splitlines():
        return False

    grass.info(_("Target database doesn't exist, "
                 "creating a new database using <%s> driver...") % driver)
    try:
        grass.run_command('db.createdb', driver=driver,
                          database=subst_database)
    except CalledModuleError:
        grass.fatal(_("Unable to create database <%s> by driver <%s>") %
                    (subst_database, driver))

    return False
Example #8
0
def main():
    """Aggregate all maps of a strds with r.series and register the result."""
    # Get the options
    input = options["input"]
    output = options["output"]
    method = options["method"]
    order = options["order"]
    where = options["where"]
    add_time = flags["t"]
    nulls = flags["n"]

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "strds")

    rows = sp.get_registered_maps("id", where, order, None)

    if not rows:
        return

    # Create the r.series input file (one map id per line); the with-block
    # closes it even on error (was: manual open/close shadowing builtin 'file')
    filename = grass.tempfile(True)
    with open(filename, 'w') as map_list:
        for row in rows:
            map_list.write("%s\n" % (row["id"]))

    flag = ""
    if len(rows) > 1000:
        grass.warning(_("Processing over 1000 maps: activating -z flag of r.series which slows down processing"))
        flag += "z"
    if nulls:
        flag += "n"

    try:
        grass.run_command("r.series", flags=flag, file=filename,
                          output=output, overwrite=grass.overwrite(),
                          method=method)
    except CalledModuleError:
        grass.fatal(_("%s failed. Check above error messages.") % 'r.series')

    if not add_time:
        # Create the time range for the output map
        if output.find("@") >= 0:
            id = output
        else:
            mapset = grass.gisenv()["MAPSET"]
            id = output + "@" + mapset

        map = sp.get_new_map_instance(id)
        map.load()
        map.set_temporal_extent(sp.get_temporal_extent())

        # Register the map in the temporal database
        if map.is_in_db():
            map.update_all()
        else:
            map.insert()
Example #9
0
def main():
    """Launch the standalone GRASS GIS Attribute Table Manager."""
    options, flags = gscript.parser()

    import wx

    from grass.script.setup import set_gui_path
    set_gui_path()

    from core.utils import _
    from dbmgr.manager import AttributeManager

    # resolve the map to its full name (name@mapset); empty means not found
    mapName = gscript.find_file(options['map'], element='vector')['fullname']
    if not mapName:
        gscript.set_raise_on_error(False)
        gscript.fatal(_("Vector map <%s> not found") % options['map'])

    app = wx.App()
    gscript.message(_("Loading attribute data for vector map <%s>...") % mapName)
    title = "%s - <%s>" % (_("GRASS GIS Attribute Table Manager"), mapName)
    manager = AttributeManager(parent=None, id=wx.ID_ANY, title=title,
                               size=(900, 600), vectorName=mapName)
    manager.Show()

    app.MainLoop()
Example #10
0
    def __init__(self, opt_output):
        """Remember the output name and abort if any raster name this
        module needs for its temporary maps is already taken."""
        self.cleanup_mask = False
        self.cleanup_layers = False

        # output map name
        self.opt_output = opt_output

        # suffix for existing mask (during overriding will be saved
        # into raster named:self.opt_output + this suffix)
        self.original_mask_suffix = "_temp_MASK"

        # collect pre-existing rasters whose names clash with the
        # temporary maps this module will create
        suffixes = (".red", ".green", ".blue", ".alpha",
                    self.original_mask_suffix)
        clashing = [
            self.opt_output + suffix
            for suffix in suffixes
            if grass.find_file(self.opt_output + suffix,
                               element="cell", mapset=".")["file"]
        ]

        if clashing:
            grass.fatal(
                _(
                    "Please change output name, or change names of these rasters: %s, "
                    "module needs to create this temporary maps during execution."
                )
                % ",".join(clashing)
            )
Example #11
0
def main():
    """Print attribute data of all maps registered in a space-time vector
    dataset, prefixed with each map's start and end time."""
    # Get the options
    input = options["input"]
    where = options["where"]
    columns = options["columns"]
    tempwhere = options["t_where"]
    layer = options["layer"]
    separator = grass.separator(options["separator"])

    # treat blank-ish option values as "not given"
    if where in ("", " ", "\n"):
        where = None

    if columns in ("", " ", "\n"):
        columns = None

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "stvds")

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", None)

    col_names = ""
    if rows:
        for row in rows:
            vector_name = "%s@%s" % (row["name"], row["mapset"])
            # In case a layer is defined in the vector dataset,
            # we override the option layer
            if row["layer"]:
                layer = row["layer"]

            select = grass.read_command("v.db.select", map=vector_name,
                                        layer=layer, columns=columns,
                                        separator="%s" % (separator), where=where)

            if not select:
                grass.fatal(_("Unable to run v.db.select for vector map <%s> "
                              "with layer %s") % (vector_name, layer))
            # The first line are the column names
            entries = select.split("\n")  # renamed: 'list' shadowed the builtin
            count = 0
            for entry in entries:
                if entry.strip() != "":
                    # print the column names in case they change
                    # (fixed: Python-2-only print statements -> print())
                    if count == 0:
                        col_names_new = "start_time%send_time%s%s" % (
                            separator, separator, entry)
                        if col_names != col_names_new:
                            col_names = col_names_new
                            print(col_names)
                    else:
                        if row["end_time"]:
                            print("%s%s%s%s%s" % (row["start_time"], separator,
                                                  row["end_time"], separator, entry))
                        else:
                            print("%s%s%s%s" % (row["start_time"],
                                                separator, separator, entry))
                    count += 1
Example #12
0
def _export_vector_maps(rows, tar, list_file, new_cwd, fs):
    """Pack each registered vector map with v.pack and add it to *tar*.

    Writes one "name:layer<fs>start<fs>end" line per map to *list_file*.
    Maps already present in the module-level *exported_maps* dict are
    skipped; on failure the work dir is removed and the tar closed.
    """
    for row in rows:
        name = row["name"]
        start = row["start_time"]
        end = row["end_time"]
        layer = row["layer"]

        # Export unique maps only
        if name in exported_maps:
            continue

        if not layer:
            layer = 1
        if not end:
            end = start
        string = "%s:%s%s%s%s%s\n" % (name, layer, fs, start, fs, end)
        # Write the filename, the start_time and the end_time
        list_file.write(string)
        # Export the vector map with v.pack
        try:
            gscript.run_command("v.pack", input=name, flags="c")
        except CalledModuleError:
            shutil.rmtree(new_cwd)
            tar.close()
            # fixed: apply % OUTSIDE _() so the untranslated format string
            # (not the already-substituted text) is looked up in the catalog
            gscript.fatal(_("Unable to export vector map <%s> with v.pack") %
                          name)

        tar.add(name + ".pack")

        exported_maps[name] = name
Example #13
0
def get_extensions():
    """Return names of installed addons whose libgis revision differs from
    the running GRASS (i.e. addons that need to be rebuilt)."""
    addon_base = os.getenv('GRASS_ADDON_BASE')
    if not addon_base:
        grass.fatal(_("%s not defined") % "GRASS_ADDON_BASE")
    fXML = os.path.join(addon_base, 'modules.xml')
    if not os.path.exists(fXML):
        return []

    # read XML file; with-block closes it on every path (was: manual
    # open/close, and the Python-2-only StandardError)
    with open(fXML, 'r') as fo:
        try:
            tree = etree.fromstring(fo.read())
        except Exception as e:
            grass.error(_("Unable to parse metadata file: %s") % e)
            return []

    libgis_rev = grass.version()['libgis_revision']
    ret = list()
    for tnode in tree.findall('task'):
        gnode = tnode.find('libgis')
        if gnode is not None and \
                gnode.get('revision', '') != libgis_rev:
            ret.append(tnode.get('name'))

    return ret
Example #14
0
def importR():
    """Import rpy2 globally and verify the required R packages, aborting
    with a single combined message if any are missing."""
    # R itself is not probed here: rpy2 complains adequately if R is absent.
    # (A former grass.find_program('R') check launched an R terminal on
    # Windows, so it was dropped.)

    # rpy2
    global robjects
    global rinterface
    grass.message(_('Loading dependencies, please wait...'))
    try:
        import rpy2.robjects as robjects
        import rpy2.rinterface as rinterface  # to speed up kriging? for plots.
    except ImportError:
        # ok for other OSes?
        grass.fatal(_("Python module 'Rpy2' not found. Please install it and re-run v.krige."))

    # R packages check. Will create one error message after check of all packages.
    missingPackagesList = [pkg for pkg in ("rgeos", "gstat", "rgrass7", "maptools")
                           if not robjects.r.require(pkg, quietly=True)[0]]
    if missingPackagesList:
        errorString = _("R package(s) ") + \
            ", ".join(map(str, missingPackagesList)) + \
            _(" missing. Install it/them and re-run v.krige.")
        grass.fatal(errorString)
Example #15
0
def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs):
    """Export each registered vector map as GML with v.out.ogr and add the
    .xml/.xsd pair to *tar*, writing "name<fs>start<fs>end" lines to
    *list_file*. On failure the work dir is removed and the tar closed."""
    for row in rows:
        name = row["name"]
        start = row["start_time"]
        end = row["end_time"]
        layer = row["layer"]
        if not layer:
            layer = 1
        if not end:
            end = start
        string = "%s%s%s%s%s\n" % (name, fs, start, fs, end)
        # Write the filename, the start_time and the end_time
        list_file.write(string)
        # Export the vector map with v.out.ogr
        try:
            gscript.run_command("v.out.ogr", input=name, output=(name + ".xml"),
                                layer=layer, format="GML")
        except CalledModuleError:
            shutil.rmtree(new_cwd)
            tar.close()
            # fixed: apply % OUTSIDE _() so the untranslated format string
            # is looked up in the message catalog
            gscript.fatal(_("Unable to export vector map <%s> as "
                            "GML with v.out.ogr") % name)

        tar.add(name + ".xml")
        tar.add(name + ".xsd")
Example #16
0
def main():
    """Launch the standalone Timeline tool for the given temporal datasets."""
    options, flags = gscript.parser()

    import wx

    from grass.script.setup import set_gui_path
    set_gui_path()

    try:
        from timeline.frame import TimelineFrame
    except ImportError as e:
        # TODO: why do we need this special check here, the reason of error
        # is wrong intallation or something, no need to report this to the
        # user in a nice way
        # fixed: BaseException.message does not exist on Python 3
        gscript.fatal(str(e))

    # drop empty entries produced by stray commas in the inputs option
    datasets = options['inputs'].strip().split(',')
    datasets = [data for data in datasets if data]
    view3d = flags['3']

    app = wx.App()
    frame = TimelineFrame(None)
    frame.SetDatasets(datasets)
    frame.Show3D(view3d)
    frame.Show()
    app.MainLoop()
Example #17
0
    def ImportMap(self, map, column):
        """ Imports GRASS map as SpatialPointsDataFrame and adds x/y columns to attribute table.
        Checks for NULL values in the provided column and exits if they are present."""

        #@NOTE: new way with R - as it doesn't alter original data
        Rpointmap = robjects.r.readVECT(map, type='point')
        # checks if x,y columns are present in dataframe. If they do are present, but with different names,
        # they'll be duplicated.
        if "x" not in robjects.r.names(Rpointmap):
            # extract coordinates with S4 method
            coordinatesPreDF = robjects.r['as.data.frame'](robjects.r.coordinates(Rpointmap))
            # coords.x1/coords.x2 are the default column names sp gives to
            # extracted point coordinates; rebuild them as plain x/y columns
            coordinatesDF = robjects.r['data.frame'](x=coordinatesPreDF.rx('coords.x1')[0],
                                                     y=coordinatesPreDF.rx('coords.x2')[0])
            # match coordinates with data slot of SpatialPointsDataFrame - maptools function
            # match is done on row.names
            Rpointmap = robjects.r.spCbind(Rpointmap, coordinatesDF)

        # GRASS checks for null values in the chosen column. R can hardly handle column as a variable,
        # looks for a hardcoded string.
        # NOTE(review): 'cols' is fetched but only 'nulls' is used below -
        # presumably kept for a later check; confirm before removing.
        cols = grass.vector_columns(map=map, layer=1)
        # v.univar output is parsed as "key: value" pairs; the
        # 'number of NULL attributes' entry counts NULLs in *column*
        nulls = int(grass.parse_command('v.univar',
                                        map=map,
                                        column=column,
                                        type='point',
                                        parse=(grass.parse_key_val,
                                               {'sep': ': '}
                                               )
                                        )['number of NULL attributes'])
        if nulls > 0:
            grass.fatal(
                _("%d NULL value(s) in the selected column - unable to perform kriging.") %
                nulls)
        return Rpointmap
Example #18
0
def create_localities(loc):
    """
    Read and parse the csv file of localities.
    The csv file must have exactly 5 columns:
    "Locality name" (a string), index (integer), code (3-4 letters), X coord, Y coord (floats)
    Create a python list, where each entry is a list of:
    ('Locality Name', Locality index, 'Locality code', X coord, Y coord)
    Also create a GRASS vector from the same file.
    """
    # "rb" is what the Python 2 csv module expects for input files
    try:
        csv_file = open(loc, "rb")
    except IOError:
        grass.fatal("Cannot open localities file: " + loc)

    grass.message(" === Reading list of localities ===")
    loc_list = []
    for row in csv.reader(csv_file, delimiter=','):
        # keep exactly the 5 expected fields: name, id, code, X, Y
        loc_list.append([row[0], row[1], row[2], row[3], row[4]])

    csv_file.close()

    # Make a GRASS vector; x=4/y=5 are the 1-based coordinate columns
    loc_vector = os.path.splitext(loc)[0]
    grass.run_command('v.in.ascii', input=loc, output=loc_vector, fs=",", x=4, y=5,
                      columns="loc_name varchar(32), id integer, code varchar(6), longitude double, latitude double",
                      quiet=True, overwrite=True)

    return loc_list, loc_vector
Example #19
0
    def checkOutletAndFromto(self):
        '''Check number of outlets and change to negative values in outlets and subbasins table'''
        # check basin outlets
        grass.message('''Find outlets (if other than ID 1, check if the accumulation
                     map has negative, ie. off-map flow)...''')
        # get fromto columns as a numpy structured array with integer
        # subbasinID/nextID/inletID fields (readSubNxtID raises ValueError
        # when the columns are not integer-convertible)
        try: sboutin = readSubNxtID(self.subbasins)
        except ValueError: grass.fatal('Cant convert the subbasinID, nextID or inletID columns of %s to integers' %self.subbasins)
        # a basin draining to itself (subbasinID == nextID) is an outlet;
        # mark it with a negative nextID and a zero inletID
        outlets = sboutin[sboutin['subbasinID']==sboutin['nextID']]
        outlets['nextID'] = np.negative(outlets['nextID'])
        outlets['inletID']= 0
        change = outlets.copy()
        # manual change using fromto
        if 'fromto' in self.options:
            # append to change array
            change=np.append(change,self.fromto)
        # update subbasins and outlets table
        update = lambda sbb,map,col,val: grun('v.db.update',map=map,column=col,
                  where='subbasinID=%s' %sbb, value=val)
        # change for each row in change, in both vectors, both columns
        for s in change:
            for m in [self.subbasins,self.outlets]:
                for c in ['nextID','inletID']:
                    update(s['subbasinID'],m,c,s[c])

        # check outlets again for reporting (re-read after the DB updates)
        sboutin = readSubNxtID(self.subbasins)
        outlets = sboutin[sboutin['subbasinID']==sboutin['nextID']]
        # also count basins whose nextID is already non-positive (off-map flow)
        outlets = np.append(outlets,sboutin[sboutin['nextID']<=0])
        grass.message('Subbasin(s) %s is(are) outlet(s)' %outlets['subbasinID'])
        return
Example #20
0
    def _fetchCapabilities(self, options, flags):
        """!Download capabilities from WCS server

        @return cap (instance of method _fetchDataFromServer)
        """
        self._debug("_fetchCapabilities", "started")
        cap_url = options['url'].strip()

        # append the query-string separator the URL still needs
        if "?" in cap_url:
            cap_url += "&"
        else:
            cap_url += "?"

        cap_url += "SERVICE=WCS&REQUEST=GetCapabilities&VERSION=" + options['version']

        if options['urlparams']:
            cap_url += "&" + options['urlparams']

        gscript.message('Fetching capabilities file\n%s' % cap_url)

        try:
            # fixed: removed leftover debug output "print dir(cap)"
            cap = self._fetchDataFromServer(cap_url, options['username'], options['password'])
        except (IOError, HTTPException) as e:
            # fixed: "except ..., e" is Python-2-only; "as e" works on 2.6+/3
            if urllib2.HTTPError == type(e) and e.code == 401:
                gscript.fatal(_("Authorization failed to <%s> when fetching capabilities") % options['url'])
            else:
                msg = _("Unable to fetch capabilities from <%s>: %s") % (options['url'], e)

                if hasattr(e, 'reason'):
                    msg += _("\nReason: ") + str(e.reason)

                gscript.fatal(msg)

        # fixed: the docstring promises cap is returned, but the original
        # body never returned it
        return cap
Example #21
0
    def _createVRT(self):
        '''! create VRT with help of gdalbuildvrt program
        VRT is a virtual GDAL dataset format

        @return path to VRT file
        '''
        self._debug("_createVRT", "started")
        vrt_file = self._tempfile()

        # assemble the gdalbuildvrt call: -te <bbox> <dst vrt> <src xml>
        args = ["gdalbuildvrt", '-te']
        args.extend(self.params['boundingbox'])
        args.extend([vrt_file, self.xml_file])
        args = [str(arg) for arg in args]

        gscript.verbose(' '.join(args))

        self.process = subprocess.Popen(args,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
        self.out, self.err = self.process.communicate()
        gscript.verbose(self.out)

        # any stderr output is treated as fatal; give a hint for the common
        # "coverage does not exist" case first
        if self.err:
            gscript.verbose(self.err + "\n")
            if "does not exist" in self.err:
                gscript.warning('Coverage "%s" cannot be opened / does not exist.' % self.params['coverage'])
            gscript.fatal("Generation of VRT-File failed (gdalbuildvrt ERROR). Set verbose-flag for details.")

        self._debug("_createVRT", "finished")
        return vrt_file
Example #22
0
    def _initializeParameters(self, options, flags):
        '''
        Initialize all given and needed parameters. Get region information and
        calculate boundingbox according to it

        '''
        self._debug("_initializeParameters", "started")

        self._env = os.environ.copy()
        self._env['GRASS_MESSAGE_FORMAT'] = 'gui'
        g.gisenv(set="GRASS_MESSAGE_FORMAT=gui")

        # options that must be whitespace-stripped
        for key in ('url', 'coverage', 'output', 'location'):
            self.params[key] = options[key].strip()

        # default the output name to the coverage name; without --o the
        # target raster must not already exist
        if not self.params['output']:
            self.params['output'] = self.params['coverage']
            if not gscript.overwrite():
                existing = gscript.find_file(name=self.params['output'], element='cell')
                if existing['file']:
                    gscript.fatal("Raster map <%s> does already exist. Choose other output name or toggle flag --o." % self.params['output'])

        # options taken verbatim
        for key in ('password', 'username', 'version', 'region'):
            self.params[key] = options[key]

        # authentication information must be complete: both or neither
        if (self.params['password'] and self.params['username'] == '') or \
           (self.params['password'] == '' and self.params['username']):
            gscript.fatal(_("Please insert both %s and %s parameters or none of them." % ('password', 'username')))

        # configure region extent (specified name or current region)
        self.params['region'] = self._getRegionParams(options['region'])
        self.params['boundingbox'] = self._computeBbox(self.params['region'])
        self._debug("_initializeParameters", "finished")
Example #23
0
    def _getRegionParams(self, opt_region):
        """!Get region parameters from region specified or active default region

        @return region_params as a dictionary
        """
        self._debug("_getRegionParameters", "started")

        if opt_region:
            # "name@mapset" -> (name, mapset); default to searching '.'
            parts = opt_region.strip().split('@', 1)
            reg_mapset = parts[1] if len(parts) > 1 else '.'

            if not gscript.find_file(name=parts[0], element='windows',
                                     mapset=reg_mapset)['name']:
                gscript.fatal(_("Region <%s> not found") % opt_region)

            s = gscript.read_command('g.region',
                                     quiet=True,
                                     flags='ug',
                                     region=opt_region)
            region_params = gscript.parse_key_val(s, val_type=float)
            gscript.verbose("Using region parameters for region %s" % opt_region)
        else:
            region_params = gscript.region()
            gscript.verbose("Using current grass region")

        self._debug("_getRegionParameters", "finished")
        return region_params
Example #24
0
def main():
    """Compute Tasseled Cap components for the chosen satellite sensor."""
    options, flags = grass.parser()
    satellite = options['satellite']
    output_basename = options['basename']
    inputs = options['input'].split(',')
    num_of_bands = 6
    if len(inputs) != num_of_bands:
        grass.fatal(_("The number of input raster maps (bands) should be %s") % num_of_bands)

    # this is here just for the compatibility with r.mapcalc expression
    # remove this if not really needed in new implementation
    # (band 6 - thermal - is skipped, so the sixth input becomes band7)
    bands = {}
    for i, band in enumerate(inputs):
        band_num = i + 1
        if band_num == 6:
            band_num = 7
        bands['band' + str(band_num)] = band
    # fixed: removed leftover Python-2 debug statement "print bands"

    if satellite == 'landsat4_tm':
        calcN(output_basename, bands, 0, 4)
    elif satellite == 'landsat5_tm':
        calcN(output_basename, bands, 1, 5)
    elif satellite == 'landsat7_etm':
        calcN(output_basename, bands, 2, 7)
    elif satellite == 'modis':
        calcN(output_basename, bands, 3, 7)
    else:
        raise RuntimeError("Invalid satellite: " + satellite)

    grass.message(_("Tasseled Cap components calculated"))
Example #25
0
def check_progs():
    """Abort if the required v.mc.py addon script is not on the PATH."""
    found_missing = False
    if not grass.find_program('v.mc.py'):
        found_missing = True
        grass.warning(_("'%s' required. Please install '%s' first \n using 'g.extension %s' or check \n PATH and GRASS_ADDON_PATH variables") % ('v.mc.py','v.mc.py','v.mc.py'))
    # fixed: the fatal check was nested inside the if-block; at function
    # level it matches the usual multi-program check pattern (behavior is
    # unchanged while only one program is checked)
    if found_missing:
        grass.fatal(_("An ERROR occurred running <v.ldm.py>"))
Example #26
0
def main():
    """Rename a column of the attribute table connected to a vector map."""
    map = options['map']
    layer = options['layer']
    column = options['column']

    mapset = grass.gisenv()['MAPSET']

    # fixed: find_file() always returns a dict (truthy even when nothing is
    # found), so the original not-found test could never fire - check the
    # 'name' entry instead
    if not grass.find_file(map, element='vector', mapset=mapset)['name']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    f = grass.vector_layer_db(map, layer)

    table = f['table']
    keycol = f['key']
    database = f['database']
    driver = f['driver']

    if not table:
        grass.fatal(_("There is no table connected to the input vector map. Cannot rename any column"))

    # column option is "oldname,newname"
    cols = column.split(',')
    oldcol = cols[0]
    newcol = cols[1]

    if driver == "dbf":
        if len(newcol) > 10:
            grass.fatal(_("Column name <%s> too long. The DBF driver supports column names not longer than 10 characters") % newcol)

    if oldcol == keycol:
        grass.fatal(_("Cannot rename column <%s> as it is needed to keep table <%s> connected to the input vector map") % (oldcol, table))

    # describe old col (renamed loop var: the original reused 'f')
    oldcoltype = None
    for col_desc in grass.db_describe(table)['cols']:
        if col_desc[0] != oldcol:
            continue
        oldcoltype = col_desc[1]
        oldcollength = col_desc[2]
        break  # fixed: stop scanning once the column is found

    # old col there?
    if not oldcoltype:
        grass.fatal(_("Column <%s> not found in table <%s>") % (oldcol, table))

    # some tricks: sqlite/dbf cannot ALTER ... RENAME, so emulate it with
    # add-column / copy / drop-column
    if driver in ['sqlite', 'dbf']:
        if oldcoltype.upper() == "CHARACTER":
            colspec = "%s varchar(%s)" % (newcol, oldcollength)
        else:
            colspec = "%s %s" % (newcol, oldcoltype)

        grass.run_command('v.db.addcolumn', map=map, layer=layer, column=colspec)
        sql = "UPDATE %s SET %s=%s" % (table, newcol, oldcol)
        grass.write_command('db.execute', input='-', database=database, driver=driver, stdin=sql)
        grass.run_command('v.db.dropcolumn', map=map, layer=layer, column=oldcol)
    else:
        sql = "ALTER TABLE %s RENAME %s TO %s" % (table, oldcol, newcol)
        grass.write_command('db.execute', input='-', database=database, driver=driver, stdin=sql)

    # write cmd history:
    grass.vector_history(map)
Example #27
0
    def __init__(self, parent, title, vectmap, modeChoices, id = wx.ID_ANY,
                 layer = 1):
        """Frame for building an SQL query against one layer of a vector map.

        vectmap may be given with or without "@mapset"; it is resolved to
        the full name before the DB connection of *layer* is inspected.
        """
        wx.Frame.__init__(self, parent, id, title)
        
        self.SetIcon(wx.Icon(os.path.join(globalvar.ICONDIR, 'grass_sql.ico'),
                             wx.BITMAP_TYPE_ICO))
        
        self.parent     = parent
        
        # variables
        self.vectmap = vectmap # fullname
        if not "@" in self.vectmap:
            # resolve to "name@mapset"; empty fullname means the map is absent
            self.vectmap = grass.find_file(self.vectmap, element = 'vector')['fullname']
            if not self.vectmap:
                grass.fatal(_("Vector map <%s> not found") % vectmap)
        self.mapname, self.mapset = self.vectmap.split("@", 1)
        
        # db info: table/driver/database for the requested layer
        self.layer = layer
        self.dbInfo = VectorDBInfo(self.vectmap)
        self.tablename = self.dbInfo.GetTable(self.layer)
        self.driver, self.database = self.dbInfo.GetDbSettings(self.layer)
        
        self.colvalues = []     # array with unique values in selected column
        
        self.panel = wx.Panel(parent = self, id = wx.ID_ANY)

        # statusbar
        self.statusbar = self.CreateStatusBar(number=1)
        
        self._doLayout(modeChoices)
Example #28
0
def main():
    """Print information about a space time dataset or the temporal DBMI.

    Depending on the flags, prints the temporal DBMI backend information
    (-d, human readable or -g shell style), the dataset history (-h), or
    the dataset info in human readable or shell script style.
    """
    name = options["input"]
    type_ = options["type"]
    shellstyle = flags['g']
    system = flags['d']
    history = flags['h']

    # Make sure the temporal database exists
    tgis.init()

    dbif, connected = tgis.init_dbif(None)

    rows = tgis.get_tgis_metadata(dbif)

    if system and not shellstyle:
        #      0123456789012345678901234567890
        print(" +------------------- Temporal DBMI backend information ----------------------+")
        print(" | DBMI Python interface:...... " + str(dbif.get_dbmi().__name__))
        print(" | Temporal database string:... " + str(
            tgis.get_tgis_database_string()))
        print(" | SQL template path:.......... " + str(
            tgis.get_sql_template_path()))
        if rows:
            for row in rows:
                print(" | %s .......... %s"%(row[0], row[1]))
        print(" +----------------------------------------------------------------------------+")
        return
    elif system:
        print("dbmi_python_interface=\'" + str(dbif.get_dbmi().__name__) + "\'")
        print("dbmi_string=\'" + str(tgis.get_tgis_database_string()) + "\'")
        print("sql_template_path=\'" + str(tgis.get_sql_template_path()) + "\'")
        if rows:
            for row in rows:
                print("%s=\'%s\'"%(row[0], row[1]))
        return

    if not system and not name:
        grass.fatal(_("Please specify %s=") % ("name"))

    # build a fully qualified id if no mapset part was given
    if name.find("@") >= 0:
        id_ = name
    else:
        id_ = name + "@" + grass.gisenv()["MAPSET"]

    dataset = tgis.dataset_factory(type_, id_)

    if not dataset.is_in_db(dbif):
        grass.fatal(_("Dataset <%s> not found in temporal database") % (id_))

    dataset.select(dbif)

    # BUG FIX: the original compared the builtin ``type`` (a class, never
    # in the list) instead of the ``type_`` option, so the history branch
    # could never be taken.
    if history and type_ in ["strds", "stvds", "str3ds"]:
        dataset.print_history()
        return

    if shellstyle:
        dataset.print_shell_info()
    else:
        dataset.print_info()
Example #29
0
def import_file(filename, archive, output, region):
    """Extracts one binary file from its archive and import it."""

    # work inside the opened archive
    with ZipFile(archive, 'r') as zip_archive:

        # scratch location to inflate the member into
        scratch_dir = grass.tempdir()
        scratch_file = os.path.join(scratch_dir, filename)

        try:
            # inflate the requested member, then hand it to r.in.bin
            grass.message("Inflating '%s' ..." % filename)
            zip_archive.extract(filename, scratch_dir)
            grass.message("Importing '%s' as <%s> ..." % (filename, output))
            grass.run_command('r.in.bin', flags='s', overwrite=True,
                              input=scratch_file, output=output,
                              bytes=2, anull=-9999, **region)

        # requested member is missing from the archive
        except KeyError:
            grass.fatal("Could not find '%s' in '%s'" % (filename, archive))

        # always clean the scratch artifacts, even on failure
        finally:
            grass.try_remove(scratch_file)
            grass.try_rmdir(scratch_dir)
Example #30
0
    def __del__(self):
        """Destructor: undo temporary state created during the import.

        Restores the previously saved MASK (if any), removes temporary
        band rasters and drops the GRASS_REGION environment override.

        NOTE(review): tests ``grass.run_command(...) != 0``, i.e. assumes
        the old grass.script API where run_command returns an exit status
        — confirm against the grass.script version in use.
        """
        # removes temporary mask, used for import transparent or warped temp_map
        if self.cleanup_mask:
            # clear temporary mask, which was set by module
            if grass.run_command("r.mask", quiet=True, flags="r") != 0:
                grass.fatal(_("%s failed") % "r.mask")

            # restore original mask, if exists
            # (it was saved as <opt_output><original_mask_suffix> before the
            # temporary mask was applied)
            if grass.find_file(self.opt_output + self.original_mask_suffix, element="cell", mapset=".")["name"]:
                if (
                    grass.run_command("g.copy", quiet=True, rast=self.opt_output + self.original_mask_suffix + ",MASK")
                    != 0
                ):
                    grass.fatal(_("%s failed") % "g.copy")

        # remove temporary created rasters
        if self.cleanup_layers:
            # collect only the band/mask rasters that actually exist
            maps = []
            for suffix in (".red", ".green", ".blue", ".alpha", self.original_mask_suffix):
                rast = self.opt_output + suffix
                if grass.find_file(rast, element="cell", mapset=".")["file"]:
                    maps.append(rast)

            if maps:
                grass.run_command("g.remove", quiet=True, flags="fb", type="rast", pattern=",".join(maps))

        # delete environmental variable which overrides region
        if "GRASS_REGION" in os.environ.keys():
            os.environ.pop("GRASS_REGION")
Example #31
0
def check_map_name(name, mapset, element_type):
    # Abort when a map of this name/type is already present in the mapset.
    existing = gscript.find_file(name, element=element_type, mapset=mapset)["file"]
    if existing:
        gscript.fatal(
            _("Raster map <%s> already exists. "
              "Change the base name or allow overwrite.") % name)
Example #32
0
def main():
    """Start the wxGUI vector digitizer on the map given in options['map'].

    Imports wx and the GUI modules only after grass.parser() has run, so
    that ``--interface-description`` etc. work without a GUI available.
    With the '-c' flag a missing vector map is created first via v.edit;
    otherwise a missing map is a fatal error.
    """
    grass.set_raise_on_error(False)

    options, flags = grass.parser()

    # import wx only after running parser
    # to avoid issues with complex imports when only interface is needed
    import wx

    from grass.script.setup import set_gui_path
    set_gui_path()

    from core.render import Map
    from mapdisp.frame import MapFrame
    from mapdisp.main import DMonGrassInterface
    from core.settings import UserSettings
    from vdigit.main import haveVDigit, errorMsg
    from grass.exceptions import CalledModuleError

    # define classes which needs imports as local
    # for longer definitions, a separate file would be a better option
    class VDigitMapFrame(MapFrame):
        # Map display frame preloaded with one vector layer and the
        # vdigit toolbar already in editing mode.

        def __init__(self, vectorMap):
            MapFrame.__init__(
                self, parent=None, Map=Map(), giface=DMonGrassInterface(None),
                title=_("GRASS GIS Vector Digitizer"), size=(850, 600))
            # this giface issue not solved yet, we must set mapframe aferwards
            self._giface._mapframe = self
            # load vector map
            mapLayer = self.GetMap().AddLayer(
                ltype='vector', name=vectorMap,
                command=['d.vect', 'map=%s' % vectorMap],
                active=True, hidden=False, opacity=1.0, render=True)

            # switch toolbar
            self.AddToolbar('vdigit', fixed=True)

            # start editing
            self.toolbars['vdigit'].StartEditing(mapLayer)

    if not haveVDigit:
        grass.fatal(_("Vector digitizer not available. %s") % errorMsg)

    if not grass.find_file(name=options['map'], element='vector',
                           mapset=grass.gisenv()['MAPSET'])['fullname']:
        if not flags['c']:
            grass.fatal(_("Vector map <%s> not found in current mapset. "
                          "New vector map can be created by providing '-c' flag.") %
                        options['map'])
        else:
            grass.verbose(_("New vector map <%s> created") % options['map'])
            try:
                grass.run_command(
                    'v.edit', map=options['map'],
                    tool='create', quiet=True)
            except CalledModuleError:
                grass.fatal(
                    _("Unable to create new vector map <%s>") %
                    options['map'])

    # allow immediate rendering
    driver = UserSettings.Get(group='display', key='driver', subkey='type')
    if driver == 'png':
        os.environ['GRASS_RENDER_IMMEDIATE'] = 'png'
    else:
        os.environ['GRASS_RENDER_IMMEDIATE'] = 'cairo'

    app = wx.App()
    frame = VDigitMapFrame(options['map'])
    frame.Show()

    app.MainLoop()
Example #33
0
    def ImportMapIntoGRASS(self, raster):
        """!Import raster into GRASS.

        Imports the warped/temporary GDAL file as <self.opt_output>,
        sets GRASS_REGION to the extent of the imported data and, when
        an alpha band is present, converts it into a MASK so transparent
        cells become NULL.  Any previously set MASK is saved first so
        the destructor can restore it.  Exits via grass.fatal() on any
        failing module call.

        @param raster path to the raster file to import
        """
        # importing temp_map into GRASS
        try:
            # NOTE(review): comment vs. code mismatch in the original —
            # it said "do not use -o flag !" yet '-o' (override the
            # projection check) IS passed below; confirm which is intended.
            grass.run_command('r.in.gdal',
                              flags='o',
                              quiet=True,
                              overwrite=True,
                              input=raster,
                              output=self.opt_output)
        except CalledModuleError:
            grass.fatal(_('%s failed') % 'r.in.gdal')

        # information for destructor to cleanup temp_layers, created
        # with r.in.gdal

        # setting region for full extend of imported raster
        # (multi-band imports create <output>.red etc.; single-band
        # imports create just <output>)
        if grass.find_file(self.opt_output + '.red',
                           element='cell',
                           mapset='.')['file']:
            region_map = self.opt_output + '.red'
        else:
            region_map = self.opt_output
        os.environ['GRASS_REGION'] = grass.region_env(rast=region_map)

        # mask created from alpha layer, which describes real extend
        # of warped layer (may not be a rectangle), also mask contains
        # transparent parts of raster
        if grass.find_file(self.opt_output + '.alpha',
                           element='cell',
                           mapset='.')['name']:
            # saving current mask (if exists) into temp raster
            if grass.find_file('MASK', element='cell', mapset='.')['name']:
                try:
                    mask_copy = self.opt_output + self.original_mask_suffix
                    grass.run_command('g.copy',
                                      quiet=True,
                                      raster='MASK,' + mask_copy)
                except CalledModuleError:
                    grass.fatal(_('%s failed') % 'g.copy')

            # info for destructor
            self.cleanup_mask = True
            # invert-select alpha value 0, i.e. mask out fully
            # transparent cells
            try:
                grass.run_command('r.mask',
                                  quiet=True,
                                  overwrite=True,
                                  maskcats="0",
                                  flags='i',
                                  raster=self.opt_output + '.alpha')
            except CalledModuleError:
                grass.fatal(_('%s failed') % 'r.mask')

            if not self.cleanup_bands:
                # use the MASK to set NULL vlues
                # (rename + mapcalc copies each band through the MASK)
                for suffix in ('.red', '.green', '.blue'):
                    rast = self.opt_output + suffix
                    if grass.find_file(rast, element='cell',
                                       mapset='.')['file']:
                        grass.run_command('g.rename',
                                          rast='%s,%s' %
                                          (rast, rast + '_null'),
                                          quiet=True)
                        grass.run_command('r.mapcalc',
                                          expression='%s = %s' %
                                          (rast, rast + '_null'),
                                          quiet=True)
                        grass.run_command('g.remove',
                                          type='raster',
                                          name='%s' % (rast + '_null'),
                                          flags='f',
                                          quiet=True)

        # TODO one band + alpha band?
        if grass.find_file(self.opt_output + '.red',
                           element='cell',
                           mapset='.')['file'] and self.cleanup_bands:
            try:
                grass.run_command('r.composite',
                                  quiet=True,
                                  overwrite=True,
                                  red=self.opt_output + '.red',
                                  green=self.opt_output + '.green',
                                  blue=self.opt_output + '.blue',
                                  output=self.opt_output)
            except CalledModuleError:
                grass.fatal(_('%s failed') % 'r.composite')
Example #34
0
def main(options, flags):
    """Compute multivariate environmental similarity surfaces (MESS).

    For every environmental variable an individual environmental
    similarity (IES) layer is computed by comparing the frequency
    distribution over the reference area (binary raster) or reference
    points (vector) with the target ``env_proj`` layers; the MES output
    is the minimum over all IES layers.  Optional flags add the most
    dissimilar variable (-m), areas with novel conditions (-n), the sum
    (-k) and count (-c) of negative IES values, and removal of the
    intermediate IES layers (-i).

    FIX: replaced the NumPy aliases ``np.float``/``np.int`` (removed in
    NumPy 1.24) with the builtin ``float``/``int`` they aliased.
    """

    gisbase = os.getenv("GISBASE")
    if not gisbase:
        gs.fatal(_("$GISBASE not defined"))
        return 0

    # Reference / sample area or points
    ref_rast = options["ref_rast"]
    ref_vect = options["ref_vect"]
    if ref_rast:
        reftype = gs.raster_info(ref_rast)
        if reftype["datatype"] != "CELL":
            gs.fatal(_("The ref_rast map must have type CELL (integer)"))
        if (reftype["min"] != 0 and reftype["min"] != 1) or reftype["max"] != 1:
            gs.fatal(
                _(
                    "The ref_rast map must be a binary raster,"
                    " i.e. it should contain only values 0 and 1 or 1 only"
                    " (now the minimum is {} and maximum is {})".format(
                        reftype["min"], reftype["max"]
                    )
                )
            )

    # old environmental layers & variable names
    reference_layer = options["env"]
    reference_layer = reference_layer.split(",")
    raster_exists(reference_layer)
    variable_name = [z.split("@")[0] for z in reference_layer]
    variable_name = [x.lower() for x in variable_name]

    # new environmental variables
    projection_layers = options["env_proj"]
    if not projection_layers:
        to_be_projected = False
        projection_layers = reference_layer
    else:
        to_be_projected = True
        projection_layers = projection_layers.split(",")
        raster_exists(projection_layers)
        if (
            len(projection_layers) != len(reference_layer)
            and len(projection_layers) != 0
        ):
            gs.fatal(
                _(
                    "The number of reference and predictor variables"
                    " should be the same. You provided {} reference and {}"
                    " projection variables".format(
                        len(reference_layer), len(projection_layers)
                    )
                )
            )

    # output layers
    opl = options["output"]
    opc = opl + "_MES"
    ipi = [opl + "_" + i for i in variable_name]

    # flags
    flm = flags["m"]
    flk = flags["k"]
    fln = flags["n"]
    fli = flags["i"]
    flc = flags["c"]

    # digits / precision
    digits = int(options["digits"])
    digits2 = pow(10, digits)

    # get current region settings, to compare to new ones later
    region_1 = gs.parse_command("g.region", flags="g")

    # Text for history in metadata
    opt2 = dict((k, v) for k, v in options.items() if v)
    hist = " ".join("{!s}={!r}".format(k, v) for (k, v) in opt2.items())
    hist = "r.mess {}".format(hist)
    unused, tmphist = tempfile.mkstemp()
    with open(tmphist, "w") as text_file:
        text_file.write(hist)

    # Create reference layer if not defined
    if not ref_rast and not ref_vect:
        ref_rast = tmpname("tmp0")
        Module(
            "r.mapcalc",
            "{0} = if(isnull({1}),null(),1)".format(ref_rast, reference_layer[0]),
            quiet=True,
        )

    # Create the recode table - Reference distribution is raster
    # (save a copy of the current MASK so it can be restored per variable)
    citiam = gs.find_file(name="MASK", element="cell", mapset=gs.gisenv()["MAPSET"])
    if citiam["fullname"]:
        rname = tmpname("tmp3")
        Module("r.mapcalc", expression="{} = MASK".format(rname), quiet=True)

    if ref_rast:
        vtl = ref_rast

        # Create temporary layer based on reference layer
        tmpf0 = tmpname("tmp2")
        Module(
            "r.mapcalc", expression="{0} = int({1} * 1)".format(tmpf0, vtl), quiet=True
        )
        Module("r.null", map=tmpf0, setnull=0, quiet=True)
        if citiam["fullname"]:
            Module("r.mask", flags="r", quiet=True)
        for i in range(len(reference_layer)):

            # Create mask based on combined MASK/reference layer
            Module("r.mask", raster=tmpf0, quiet=True)

            # Calculate the frequency distribution
            tmpf1 = tmpname("tmp4")
            Module(
                "r.mapcalc",
                expression="{0} = int({1} * {2})".format(
                    tmpf1, digits2, reference_layer[i]
                ),
                quiet=True,
            )
            stats_out = Module(
                "r.stats",
                flags="cn",
                input=tmpf1,
                sort="asc",
                separator=";",
                stdout_=PIPE,
            ).outputs.stdout
            stval = {}
            stats_outlines = stats_out.replace("\r", "").split("\n")
            stats_outlines = [_f for _f in stats_outlines if _f]
            for z in stats_outlines:
                [val, count] = z.split(";")
                stval[float(val)] = float(count)
            sstval = sorted(stval.items(), key=operator.itemgetter(0))
            sstval = np.matrix(sstval)
            a = np.cumsum(np.array(sstval), axis=0)
            b = np.sum(np.array(sstval), axis=0)
            # cumulative percentage of cells per value
            c = a[:, 1] / b[1] * 100

            # Remove tmp mask and set region to env_proj if needed
            Module("r.mask", quiet=True, flags="r")
            if to_be_projected:
                gs.use_temp_region()
                Module("g.region", quiet=True, raster=projection_layers[0])

            # get new region settings, to compare to original ones later
            region_2 = gs.parse_command("g.region", flags="g")

            # Get min and max values for recode table (based on full map)
            tmpf2 = tmpname("tmp5")
            Module(
                "r.mapcalc",
                expression="{0} = int({1} * {2})".format(
                    tmpf2, digits2, projection_layers[i]
                ),
                quiet=True,
            )
            d = gs.parse_command("r.univar", flags="g", map=tmpf2, quiet=True)

            # Create recode rules
            Dmin = int(d["min"])
            Dmax = int(d["max"])
            envmin = np.min(np.array(sstval), axis=0)[0]
            envmax = np.max(np.array(sstval), axis=0)[0]

            if Dmin < envmin:
                e1 = Dmin - 1
            else:
                e1 = envmin - 1
            if Dmax > envmax:
                e2 = Dmax + 1
            else:
                e2 = envmax + 1

            a1 = np.hstack([(e1), np.array(sstval.T[0])[0, :]])
            a2 = np.hstack([np.array(sstval.T[0])[0, :] - 1, (e2)])
            b1 = np.hstack([(0), c])

            fd2, tmprule = tempfile.mkstemp(suffix=variable_name[i])
            with open(tmprule, "w") as text_file:
                for k in np.arange(0, len(b1.T)):
                    text_file.write(
                        "%s:%s:%s\n" % (str(int(a1[k])), str(int(a2[k])), str(b1[k]))
                    )

            # Create the recode layer and calculate the IES
            compute_ies(tmprule, ipi[i], tmpf2, envmin, envmax)
            Module(
                "r.support",
                map=ipi[i],
                title="IES {}".format(reference_layer[i]),
                units="0-100 (relative score)",
                description="Environmental similarity {}".format(reference_layer[i]),
                loadhistory=tmphist,
            )

            # Clean up
            os.close(fd2)
            os.remove(tmprule)

            # Change region back to original
            gs.del_temp_region()

    # Create the recode table - Reference distribution is vector
    else:
        vtl = ref_vect

        # Copy point layer and add columns for variables
        tmpf0 = tmpname("tmp7")
        Module(
            "v.extract", quiet=True, flags="t", input=vtl, type="point", output=tmpf0
        )
        Module("v.db.addtable", quiet=True, map=tmpf0)

        # TODO: see if there is a more efficient way to handle the mask
        if citiam["fullname"]:
            Module("r.mask", quiet=True, flags="r")

        # Upload raster values and get value in python as frequency table
        sql1 = "SELECT cat FROM {}".format(str(tmpf0))
        cn = len(np.hstack(db.db_select(sql=sql1)))
        for m in range(len(reference_layer)):

            # Set mask back (this means that points outside the mask will
            # be ignored in the computation of the frequency distribution
            # of the reference variabele env(m))
            if citiam["fullname"]:
                Module("g.copy", raster=[rname, "MASK"], quiet=True)

            # Compute frequency distribution of variable(m)
            mid = str(m)
            laytype = gs.raster_info(reference_layer[m])["datatype"]
            if laytype == "CELL":
                columns = "envvar_{} integer".format(str(mid))
            else:
                columns = "envvar_{} double precision".format(str(mid))
            Module("v.db.addcolumn", map=tmpf0, columns=columns, quiet=True)
            sql2 = "UPDATE {} SET envvar_{} = NULL".format(str(tmpf0), str(mid))
            Module("db.execute", sql=sql2, quiet=True)
            coln = "envvar_{}".format(str(mid))
            Module(
                "v.what.rast",
                quiet=True,
                map=tmpf0,
                layer=1,
                raster=reference_layer[m],
                column=coln,
            )
            sql3 = (
                "SELECT {0}, count({0}) from {1} WHERE {0} IS NOT NULL "
                "GROUP BY {0} ORDER BY {0}"
            ).format(coln, tmpf0)
            volval = np.vstack(db.db_select(sql=sql3))
            # FIX: np.float was removed in NumPy 1.24; it was an alias
            # for the builtin float
            volval = volval.astype(float, copy=False)
            a = np.cumsum(volval[:, 1], axis=0)
            b = np.sum(volval[:, 1], axis=0)
            c = a / b * 100

            # Check for point without values
            if b < cn:
                gs.info(
                    _(
                        "Please note that there were {} points without "
                        "value. This is probably because they are outside "
                        "the computational region or mask".format((cn - b))
                    )
                )

            # Set region to env_proj layers (if different from env) and remove
            # mask (if set above)
            if citiam["fullname"]:
                Module("r.mask", quiet=True, flags="r")
            if to_be_projected:
                gs.use_temp_region()
                Module("g.region", quiet=True, raster=projection_layers[0])
            region_2 = gs.parse_command("g.region", flags="g")

            # Multiply env_proj layer with dignum
            tmpf2 = tmpname("tmp8")
            Module(
                "r.mapcalc",
                expression="{0} = int({1} * {2})".format(
                    tmpf2, digits2, projection_layers[m]
                ),
                quiet=True,
            )

            # Calculate min and max values of sample points and raster layer
            envmin = int(min(volval[:, 0]) * digits2)
            envmax = int(max(volval[:, 0]) * digits2)
            Drange = gs.read_command("r.info", flags="r", map=tmpf2)
            Drange = str.splitlines(Drange)
            Drange = np.hstack([i.split("=") for i in Drange])
            Dmin = int(Drange[1])
            Dmax = int(Drange[3])

            if Dmin < envmin:
                e1 = Dmin - 1
            else:
                e1 = envmin - 1
            if Dmax > envmax:
                e2 = Dmax + 1
            else:
                e2 = envmax + 1

            a0 = volval[:, 0] * digits2
            # FIX: np.int was removed in NumPy 1.24; it was an alias for
            # the builtin int
            a0 = a0.astype(int, copy=False)
            a1 = np.hstack([(e1), a0])
            a2 = np.hstack([a0 - 1, (e2)])
            b1 = np.hstack([(0), c])

            fd3, tmprule = tempfile.mkstemp(suffix=variable_name[m])
            with open(tmprule, "w") as text_file:
                for k in np.arange(0, len(b1)):
                    rtmp = "{}:{}:{}\n".format(
                        str(int(a1[k])), str(int(a2[k])), str(b1[k])
                    )
                    text_file.write(rtmp)

            # Create the recode layer and calculate the IES
            compute_ies(tmprule, ipi[m], tmpf2, envmin, envmax)
            Module(
                "r.support",
                map=ipi[m],
                title="IES {}".format(reference_layer[m]),
                units="0-100 (relative score)",
                description="Environmental similarity {}".format(reference_layer[m]),
                loadhistory=tmphist,
            )

            # Clean up
            os.close(fd3)
            os.remove(tmprule)

            # Change region back to original
            gs.del_temp_region()

    # Calculate MESS statistics
    # Set region to env_proj layers (if different from env)
    # Note: this changes the region, to ensure the newly created layers
    # are actually visible to the user. This goes against normal practise
    # There will be a warning.
    if to_be_projected:
        Module("g.region", quiet=True, raster=projection_layers[0])

    # MES
    Module("r.series", quiet=True, output=opc, input=ipi, method="minimum")
    gs.write_command("r.colors", map=opc, rules="-", stdin=COLORS_MES, quiet=True)

    # Write layer metadata
    Module(
        "r.support",
        map=opc,
        title="Areas with novel conditions",
        units="0-100 (relative score)",
        description="The multivariate environmental similarity" "(MES)",
        loadhistory=tmphist,
    )

    # Area with negative MES
    if fln:
        mod1 = "{}_novel".format(opl)
        Module("r.mapcalc", "{} = int(if( {} < 0, 1, 0))".format(mod1, opc), quiet=True)

        # Write category labels
        Module("r.category", map=mod1, rules="-", stdin=RECL_MESNEG, quiet=True)

        # Write layer metadata
        Module(
            "r.support",
            map=mod1,
            title="Areas with novel conditions",
            units="-",
            source1="Based on {}".format(opc),
            description="1 = novel conditions, 0 = within range",
            loadhistory=tmphist,
        )

    # Most dissimilar variable (MoD)
    if flm:
        tmpf4 = tmpname("tmp9")
        mod2 = "{}_MoD".format(opl)
        Module("r.series", quiet=True, output=tmpf4, input=ipi, method="min_raster")
        Module("r.mapcalc", "{} = int({})".format(mod2, tmpf4), quiet=True)

        fd4, tmpcat = tempfile.mkstemp()
        with open(tmpcat, "w") as text_file:
            for cats in range(len(ipi)):
                text_file.write("{}:{}\n".format(str(cats), reference_layer[cats]))
        Module("r.category", quiet=True, map=mod2, rules=tmpcat, separator=":")
        os.close(fd4)
        os.remove(tmpcat)

        # Write layer metadata
        Module(
            "r.support",
            map=mod2,
            title="Most dissimilar variable (MoD)",
            units="-",
            source1="Based on {}".format(opc),
            description="Name of most dissimilar variable",
            loadhistory=tmphist,
        )

    # sum(IES), where IES < 0
    if flk:
        mod3 = "{}_SumNeg".format(opl)
        c0 = -0.01 / digits2
        Module(
            "r.series",
            quiet=True,
            input=ipi,
            method="sum",
            range=("-inf", c0),
            output=mod3,
        )
        gs.write_command("r.colors", map=mod3, rules="-", stdin=COLORS_MES, quiet=True)

        # Write layer metadata
        Module(
            "r.support",
            map=mod3,
            title="Sum of negative IES values",
            units="-",
            source1="Based on {}".format(opc),
            description="Sum of negative IES values",
            loadhistory=tmphist,
        )

    # Number of layers with negative values
    if flc:
        tmpf5 = tmpname("tmp10")
        mod4 = "{}_CountNeg".format(opl)
        MinMes = gs.read_command("r.info", quiet=True, flags="r", map=opc)
        MinMes = str.splitlines(MinMes)
        MinMes = float(np.hstack([i.split("=") for i in MinMes])[1])
        c0 = -0.0001 / digits2
        Module(
            "r.series",
            quiet=True,
            input=ipi,
            output=tmpf5,
            method="count",
            range=(MinMes, c0),
        )
        gs.mapcalc("$mod4 = int($tmpf5)", mod4=mod4, tmpf5=tmpf5, quiet=True)

        # Write layer metadata
        Module(
            "r.support",
            map=mod4,
            title="Number of layers with negative values",
            units="-",
            source1="Based on {}".format(opc),
            description="Number of layers with negative values",
            loadhistory=tmphist,
        )

    # Remove IES layers
    if fli:
        Module("g.remove", quiet=True, flags="f", type="raster", name=ipi)
    # Clean up tmp file
    # os.remove(tmphist)

    gs.message(_("Finished ...\n"))
    if region_1 != region_2:
        gs.message(
            _(
                "\nPlease note that the region has been changes to match"
                " the set of projection (env_proj) variables.\n"
            )
        )
Example #35
0
def main():
    """Unregister maps from the temporal database or a space time dataset.

    Map names come either from the ``maps`` option (comma separated) or
    from the file given via ``file`` (one name per line); the two are
    mutually exclusive.  When ``input`` is set, maps are unregistered
    only from that space time dataset; otherwise they are removed from
    the temporal database entirely and every affected dataset is updated
    afterwards.

    FIX: the map-list file is now closed (context manager); locals no
    longer shadow the builtins ``file``, ``input``, ``type``, ``map``
    and ``id``.
    """
    # Get the options
    filepath = options["file"]
    input_ = options["input"]
    maps = options["maps"]
    type_ = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    if maps and filepath:
        grass.fatal(_(
            "%s= and %s= are mutually exclusive") % ("input", "file"))

    if not maps and not filepath:
        grass.fatal(_("%s= or %s= must be specified") % ("input", "file"))

    mapset = grass.gisenv()["MAPSET"]

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # In case a space time dataset is specified
    if input_:
        sp = tgis.open_old_stds(input_, type_, dbif)

    maplist = []

    # dummy dataset used only to build fully qualified map ids
    dummy = tgis.RasterDataset(None)

    # Map names as comma separated string
    if maps is not None and maps != "":
        # split() handles the single-name case as well
        maplist = maps.split(",")

        # Build the maplist of fully qualified ids
        for count in range(len(maplist)):
            mapname = maplist[count]
            mapid = dummy.build_id(mapname, mapset)
            maplist[count] = mapid

    # Read the map list from file (one map name per line); the context
    # manager guarantees the file handle is closed
    if filepath:
        with open(filepath, "r") as list_file:
            for line in list_file:
                mapname = line.strip()
                if not mapname:
                    # skip blank lines instead of registering empty ids
                    continue
                mapid = dummy.build_id(mapname, mapset)
                maplist.append(mapid)

    num_maps = len(maplist)
    update_dict = {}
    count = 0

    statement = ""

    # Unregister already registered maps
    grass.message(_("Unregister maps"))
    for mapid in maplist:
        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        map_obj = tgis.dataset_factory(type_, mapid)

        # Unregister map if in database
        if map_obj.is_in_db(dbif):
            # Unregister from a single dataset
            if input_:
                # Collect SQL statements
                statement += sp.unregister_map(
                    map=map_obj, dbif=dbif, execute=False)

            # Unregister from temporal database
            else:
                # We need to update all datasets after the removement of maps
                map_obj.metadata.select(dbif)
                datasets = map_obj.get_registered_stds(dbif)
                # Store all unique dataset ids in a dictionary
                if datasets:
                    for dataset in datasets:
                        update_dict[dataset] = dataset
                # Collect SQL statements
                statement += map_obj.delete(dbif=dbif, update=False,
                                            execute=False)
        else:
            grass.warning(_("Unable to find %s map <%s> in temporal database" %
                            (map_obj.get_type(), map_obj.get_id())))

        count += 1

    # Execute the collected SQL statements in one transaction
    if statement:
        dbif.execute_transaction(statement)

    grass.percent(num_maps, num_maps, 1)

    # Update space time datasets
    grass.message(_("Unregister maps from space time dataset(s)"))
    if input_:
        sp.update_from_registered_maps(dbif)
        sp.update_command_string(dbif=dbif)
    elif len(update_dict) > 0:
        count = 0
        for stds_id in update_dict.values():
            sp = tgis.open_old_stds(stds_id, type_, dbif)
            sp.update_from_registered_maps(dbif)
            grass.percent(count, len(update_dict), 1)
            count += 1

    dbif.close()
Example #36
0
def main():
    """Run the simulation `repeat` times in parallel, optionally split by subregion.

    Pops the run-control options (``repeat``, ``nprocs``) out of the global
    ``options`` dict, optionally splits the subregion raster into one raster
    per category (-d flag), builds one option set per (run, subregion)
    combination, executes them through a multiprocessing pool, and finally
    patches per-subregion outputs back together.

    NOTE: ``options``/``flags`` are presumably the parsed module parameters
    from the GRASS parser — confirm against the module header.
    """
    # pop run-control params so they are not forwarded to the worker module
    repeat = int(options.pop("repeat"))
    nprocs = int(options.pop("nprocs"))
    subregions = options["subregions"]
    tosplit = flags["d"]
    # filter unused optional params
    for key in list(options.keys()):
        if options[key] == "":
            options.pop(key)
    # per-subregion parallelization and output_series are mutually exclusive
    if tosplit and "output_series" in options:
        gscript.fatal(
            _("Parallelization on subregion level is not supported together with <output_series> option"
              ))

    # refuse to clobber existing results unless --overwrite was given;
    # only the first run's output name is probed as a representative
    if (not gscript.overwrite() and gscript.list_grouped(
            "raster",
            pattern=options["output"] + "_run1")[gscript.gisenv()["MAPSET"]]):
        gscript.fatal(
            _("Raster map <{r}> already exists."
              " To overwrite, use the --overwrite flag").format(
                  r=options["output"] + "_run1"))
    global TMP_RASTERS
    cats = []
    if tosplit:
        # create one masked copy of the subregion raster per category,
        # done in parallel via mapcalc expressions
        gscript.message(_("Splitting subregions"))
        cats = (gscript.read_command("r.stats", flags="n",
                                     input=subregions).strip().splitlines())
        if len(cats) < 2:
            gscript.fatal(
                _("Not enough subregions to split computation. Do not use -d flag."
                  ))
        mapcalcs = []
        for cat in cats:
            new = PREFIX + cat
            TMP_RASTERS.append(new)
            mapcalcs.append("{new} = if({sub} == {cat}, {sub}, null())".format(
                sub=subregions, cat=cat, new=new))
        pool = Pool(nprocs)
        p = pool.map_async(split_subregions, mapcalcs)
        try:
            p.wait()
        except (KeyboardInterrupt, CalledModuleError):
            return

    # build one option dict per (run, subregion); random_seed varies per run
    options_list = []
    for i in range(repeat):
        if cats:
            for cat in cats:
                op = options.copy()
                op["random_seed"] = i + 1
                if "output_series" in op:
                    op["output_series"] += "_run" + str(i + 1) + "_" + cat
                    TMP_RASTERS.append(op["output_series"])
                op["output"] += "_run" + str(i + 1) + "_" + cat
                op["subregions"] = PREFIX + cat
                options_list.append((repeat, i + 1, cat, op))
                TMP_RASTERS.append(op["output"])
        else:
            op = options.copy()
            op["random_seed"] = i + 1
            if "output_series" in op:
                op["output_series"] += "_run" + str(i + 1)
            op["output"] += "_run" + str(i + 1)
            options_list.append((repeat, i + 1, None, op))

    # run all simulations in parallel
    pool = Pool(nprocs)
    p = pool.map_async(futures_process, options_list)
    try:
        p.wait()
    except (KeyboardInterrupt, CalledModuleError):
        return

    # merge per-subregion results into one raster per run
    if cats:
        gscript.message(_("Patching subregions"))
        for i in range(repeat):
            patch_input = [
                options["output"] + "_run" + str(i + 1) + "_" + cat
                for cat in cats
            ]
            gscript.run_command(
                "r.patch",
                input=patch_input,
                output=options["output"] + "_run" + str(i + 1),
            )

    return 0
Example #37
0
def main():
    """Join columns from another attribute table into a vector map's table.

    Adds each selected column of ``other_table`` to the map's table (if not
    already present) and fills it via a correlated UPDATE on the join columns.
    """
    vect = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['other_table']
    ocolumn = options['other_column']
    scolumns = (options['subset_columns'].split(',')
                if options['subset_columns'] else None)

    try:
        conn = grass.vector_layer_db(vect, layer)
    except CalledModuleError:
        sys.exit(1)

    maptable = conn['table']
    database = conn['database']
    driver = conn['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(
            _("There is no table connected to this map. Unable to join any column."
              ))

    # the join column must exist in the map's own table
    if column not in grass.vector_columns(vect, layer):
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (column, maptable))

    # describe the other table and validate the join column on its side
    all_cols_ot = grass.db_describe(otable, driver=driver,
                                    database=database)['cols']
    if ocolumn not in [c[0] for c in all_cols_ot]:
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # pick which columns to transfer: all of them, or a validated subset
    if not scolumns:
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        for scol in scolumns:
            match = next((c for c in all_cols_ot if c[0] == scol), None)
            if match is None:
                grass.warning(
                    _("Column <%s> not found in table <%s>") % (scol, otable))
            else:
                cols_to_add.append(match)

    all_cols_tt = grass.vector_columns(vect, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in cols_to_add:
        colname = col[0]
        # never re-add the column used for the join itself
        if colname == column:
            continue

        # decide whether the width/precision suffix can be kept
        use_len = len(col) > 2
        if use_len:
            if driver == "sqlite":
                # Sqlite 3 does not support the precision number any more
                use_len = False
            elif driver == "mysql" and col[1] == 'DOUBLE PRECISION':
                # MySQL - expect format DOUBLE PRECISION(M,D), see #2792
                use_len = False

        coltype = "%s(%s)" % (col[1], col[2]) if use_len else "%s" % col[1]
        colspec = "%s %s" % (colname, coltype)

        # create the column only when it is not already in the map table
        if colname not in all_cols_tt:
            try:
                grass.run_command('v.db.addcolumn',
                                  map=vect,
                                  columns=colspec,
                                  layer=layer)
            except CalledModuleError:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table=maptable,
                                   column=column,
                                   otable=otable,
                                   ocolumn=ocolumn,
                                   colname=colname)
        grass.debug(stmt, 1)
        grass.verbose(
            _("Updating column <%s> of vector map <%s>...") % (colname, vect))
        try:
            grass.write_command('db.execute',
                                stdin=stmt,
                                input='-',
                                database=database,
                                driver=driver)
        except CalledModuleError:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(vect)

    return 0
Example #38
0
def main():
    """Query and download Sentinel-2 scenes in parallel.

    Either takes scene names directly (-s flag) or queries matching scenes
    via i.sentinel.download -l, then downloads them through a
    ParallelModuleQueue, optionally one folder per scene (-f flag).
    """
    settings = options['settings']
    scene_names = options['scene_name'].split(',')
    output = options['output']
    nprocs = int(options['nprocs'])
    clouds = int(options['clouds'])
    producttype = options['producttype']
    start = options['start']
    end = options['end']
    use_scenenames = flags['s']
    ind_folder = flags['f']

    # the i.sentinel.download addon is a hard requirement
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(
            _("The 'i.sentinel.download' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")

    # the sentinelsat credentials file must exist
    if not os.path.isfile(settings):
        grass.fatal(_("Settings file <%s> not found" % (settings)))

    # common environment settings inherited by all child processes
    os.environ.update(
        dict(GRASS_COMPRESS_NULLS='1',
             GRASS_COMPRESSOR='ZSTD',
             GRASS_MESSAGE_FORMAT='plain'))

    # sanity-check the requested degree of parallelism
    if nprocs > mp.cpu_count():
        grass.fatal("Using %d parallel processes but only %d CPUs available." %
                    (nprocs, mp.cpu_count()))
    elif nprocs > 2:
        # sentinelsat allows only a few parallel downloads
        grass.message("Maximum number of parallel processes for Downloading" +
                      " fixed to 2 due to sentinelsat API restrictions")
        nprocs = 2

    if use_scenenames:
        scenenames = scene_names
        # lazy validity check on the first provided scene name
        if len(scenenames[0]) < 10:
            grass.fatal(
                "No scene names indicated. Please provide scenenames in \
                        the format S2A_MSIL1C_20180822T155901_N0206_R097_T17SPV_20180822T212023.SAFE"
            )
    else:
        # query the catalogue for a list of scene names to download
        query_result = grass.parse_command(
            'i.sentinel.download',
            settings=settings,
            producttype=producttype,
            start=start,
            end=end,
            clouds=clouds,
            flags='l')
        scenenames = [entry.split(' ')[1] for entry in query_result.keys()]

    grass.message(_("Downloading Sentinel-2 data..."))

    # no point in more workers than scenes
    if len(scenenames) == 1:
        nprocs = 1

    queue_download = ParallelModuleQueue(nprocs=nprocs)
    for idx, scenename in enumerate(scenenames):
        producttype, start_date, end_date, query_string = scenename_split(
            scenename)
        # separate folder per scene makes parallel import easier later
        if ind_folder:
            outpath = os.path.join(output, 'dl_s2_%s' % str(idx + 1))
        else:
            outpath = output
        queue_download.put(Module('i.sentinel.download',
                                  settings=settings,
                                  start=start_date,
                                  end=end_date,
                                  producttype=producttype,
                                  query=query_string,
                                  output=outpath,
                                  run_=False))
    queue_download.wait()
Example #39
0
def checkPercentile(per, di):
    """Abort when an oblique direction is requested without the percentile option."""
    oblique_directions = ("NW-SE", "NE-SW", "SW-NE", "SE-NW")
    if di in oblique_directions and not per:
        grass.fatal(
            "Percentile option has to be set with {dire} direction".format(dire=di)
        )
Example #40
0
def main():
    """Compute univariate statistics on a numeric attribute column.

    Dumps the column values via db.select into a temporary file, computes
    count/min/max/mean/variance/stddev etc., and with the -e flag also
    quartiles and user-requested percentiles.  Output is human-readable
    text or, with -g, shell-style key=value lines.

    Fixes over the previous revision: Python 2 ``file()`` replaced with
    ``open()``, tab indentation normalized (Python 3 rejects mixed
    tabs/spaces), the empty-result file handle is now closed before
    ``grass.fatal`` (the old ``close()`` after ``fatal`` was unreachable
    and leaked the handle on the success path), the sorted-values file is
    closed, builtin-shadowing ``sum`` and ambiguous ``l`` renamed, and the
    unused ``number`` local removed.
    """
    global tmp
    tmp = grass.tempfile()

    extend = flags['e']
    shellstyle = flags['g']
    table = options['table']
    column = options['column']
    database = options['database']
    driver = options['driver']
    where = options['where']
    perc = options['percentile']

    perc = [float(p) for p in perc.split(',')]

    # validate that the column exists and is numeric
    desc_table = grass.db_describe(table, database=database, driver=driver)
    if not desc_table:
        grass.fatal(_("Unable to describe table <%s>") % table)
    found = False
    for cname, ctype, cwidth in desc_table['cols']:
        if cname == column:
            found = True
            if ctype not in ('INTEGER', 'DOUBLE PRECISION'):
                grass.fatal(_("Column <%s> is not numeric") % cname)
    if not found:
        grass.fatal(_("Column <%s> not found in table <%s>") % (column, table))

    if not shellstyle:
        grass.verbose(_("Calculation for column <%s> of table <%s>...") % (column, table))
        grass.message(_("Reading column values..."))

    sql = "SELECT %s FROM %s" % (column, table)
    if where:
        sql += " WHERE " + where

    # empty strings mean "use the default DB connection"
    if not database:
        database = None
    if not driver:
        driver = None

    # dump the column values into the temporary file
    tmpf = open(tmp, 'w')
    grass.run_command('db.select', flags='c', table=table,
                      database=database, driver=driver, sql=sql,
                      stdout=tmpf)
    tmpf.close()

    # check if result is empty; close the handle *before* fatal() exits
    tmpf = open(tmp)
    empty = tmpf.read(1) == ''
    tmpf.close()
    if empty:
        grass.fatal(_("Table <%s> contains no data.") % table)

    # calculate statistics in a single streaming pass
    if not shellstyle:
        grass.verbose(_("Calculating statistics..."))

    N = 0
    total = 0.0       # sum of values
    total_sq = 0.0    # sum of squares
    total_abs = 0.0   # sum of absolute values
    minv = 1e300
    maxv = -1e300

    tmpf = open(tmp)
    for line in tmpf:
        if len(line.rstrip('\r\n')) == 0:
            continue
        x = float(line.rstrip('\r\n'))
        N += 1
        total += x
        total_sq += x * x
        total_abs += abs(x)
        maxv = max(maxv, x)
        minv = min(minv, x)
    tmpf.close()

    if N <= 0:
        grass.fatal(_("No non-null values found"))

    if not shellstyle:
        sys.stdout.write("Number of values: %d\n" % N)
        sys.stdout.write("Minimum: %.15g\n" % minv)
        sys.stdout.write("Maximum: %.15g\n" % maxv)
        sys.stdout.write("Range: %.15g\n" % (maxv - minv))
        sys.stdout.write("Mean: %.15g\n" % (total / N))
        sys.stdout.write("Arithmetic mean of absolute values: %.15g\n" % (total_abs / N))
        sys.stdout.write("Variance: %.15g\n" % ((total_sq - total * total / N) / N))
        sys.stdout.write("Standard deviation: %.15g\n" % (math.sqrt((total_sq - total * total / N) / N)))
        sys.stdout.write("Coefficient of variation: %.15g\n" % ((math.sqrt((total_sq - total * total / N) / N)) / (math.sqrt(total * total) / N)))
        sys.stdout.write("Sum: %.15g\n" % total)
    else:
        sys.stdout.write("n=%d\n" % N)
        sys.stdout.write("min=%.15g\n" % minv)
        sys.stdout.write("max=%.15g\n" % maxv)
        sys.stdout.write("range=%.15g\n" % (maxv - minv))
        sys.stdout.write("mean=%.15g\n" % (total / N))
        sys.stdout.write("mean_abs=%.15g\n" % (total_abs / N))
        sys.stdout.write("variance=%.15g\n" % ((total_sq - total * total / N) / N))
        sys.stdout.write("stddev=%.15g\n" % (math.sqrt((total_sq - total * total / N) / N)))
        sys.stdout.write("coeff_var=%.15g\n" % ((math.sqrt((total_sq - total * total / N) / N)) / (math.sqrt(total * total) / N)))
        sys.stdout.write("sum=%.15g\n" % total)

    if not extend:
        return

    # extended stats need the values in sorted order
    sortfile(tmp, tmp + ".sort")

    odd = N % 2
    eostr = ['even', 'odd'][odd]

    # 1-based line positions of the quartiles in the sorted file
    # NOTE(review): Python 3 round() uses banker's rounding, so for exact
    # .5 positions the selected index may differ by one from Python 2.
    q25pos = round(N * 0.25)
    if q25pos == 0:
        q25pos = 1
    q50apos = round(N * 0.50)
    if q50apos == 0:
        q50apos = 1
    q50bpos = q50apos + (1 - odd)
    q75pos = round(N * 0.75)
    if q75pos == 0:
        q75pos = 1

    # positions/values for each requested percentile
    ppos = {}
    pval = {}
    for i in range(len(perc)):
        ppos[i] = round(N * perc[i] / 100)
        if ppos[i] == 0:
            ppos[i] = 1
        pval[i] = 0

    inf = open(tmp + ".sort")
    lineno = 1
    for line in inf:
        if lineno == q25pos:
            q25 = float(line.rstrip('\r\n'))
        if lineno == q50apos:
            q50a = float(line.rstrip('\r\n'))
        if lineno == q50bpos:
            q50b = float(line.rstrip('\r\n'))
        if lineno == q75pos:
            q75 = float(line.rstrip('\r\n'))
        for i in range(len(ppos)):
            if lineno == ppos[i]:
                pval[i] = float(line.rstrip('\r\n'))
        lineno += 1
    inf.close()

    # median: average of the two middle values for even N
    q50 = (q50a + q50b) / 2

    if not shellstyle:
        sys.stdout.write("1st Quartile: %.15g\n" % q25)
        sys.stdout.write("Median (%s N): %.15g\n" % (eostr, q50))
        sys.stdout.write("3rd Quartile: %.15g\n" % q75)
        for i in range(len(perc)):
            if perc[i] == int(perc[i]):  # integer, pick the English ordinal suffix
                if int(perc[i]) % 10 == 1 and int(perc[i]) != 11:
                    sys.stdout.write("%dst Percentile: %.15g\n" % (int(perc[i]), pval[i]))
                elif int(perc[i]) % 10 == 2 and int(perc[i]) != 12:
                    sys.stdout.write("%dnd Percentile: %.15g\n" % (int(perc[i]), pval[i]))
                elif int(perc[i]) % 10 == 3 and int(perc[i]) != 13:
                    sys.stdout.write("%drd Percentile: %.15g\n" % (int(perc[i]), pval[i]))
                else:
                    sys.stdout.write("%dth Percentile: %.15g\n" % (int(perc[i]), pval[i]))
            else:
                sys.stdout.write("%.15g Percentile: %.15g\n" % (perc[i], pval[i]))
    else:
        sys.stdout.write("first_quartile=%.15g\n" % q25)
        sys.stdout.write("median=%.15g\n" % q50)
        sys.stdout.write("third_quartile=%.15g\n" % q75)
        for i in range(len(perc)):
            percstr = "%.15g" % perc[i]
            percstr = percstr.replace('.', '_')
            sys.stdout.write("percentile_%s=%.15g\n" % (percstr, pval[i]))
Example #41
0
def main():
    """Sample space time raster dataset(s) with a vector map (t.vect.observe.strds).

    For each granule of the sample strds (or the topology-matched granules of
    several strds) a new layer is created in a copy of the input vector map,
    attribute columns are added and filled via v.what.rast, and the per-granule
    vector maps are registered in a new space time vector dataset.

    Fix: the output title string read "Observaion" — corrected to
    "Observation", consistent with the description string below it.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    input = options["input"]
    output = options["output"]
    vector_output = options["vector_output"]
    strds = options["strds"]
    where = options["where"]
    columns = options["columns"]

    # treat blank-ish WHERE clauses as "no filter"
    if where == "" or where == " " or where == "\n":
        where = None

    overwrite = grass.overwrite()

    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")

    if len(strds_names) != len(column_names):
        grass.fatal(
            _("The number of columns must be equal to the number of space time raster datasets"
              ))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = grass.gisenv()["MAPSET"]

    out_sp = tgis.check_new_stds(output, "stvds", dbif, overwrite)

    samples = []

    first_strds = tgis.open_old_stds(strds_names[0], "strds", dbif)

    # Single space time raster dataset
    if len(strds_names) == 1:
        rows = first_strds.get_registered_maps(
            columns="name,mapset,start_time,end_time",
            order="start_time",
            dbif=dbif)

        if not rows:
            dbif.close()
            # NOTE(review): the message reports the output dataset id, but the
            # empty dataset is first_strds — confirm which id is intended
            grass.fatal(
                _("Space time raster dataset <%s> is empty") % out_sp.get_id())

        for row in rows:
            start = row["start_time"]
            end = row["end_time"]
            raster_maps = [
                row["name"] + "@" + row["mapset"],
            ]

            s = Sample(start, end, raster_maps)
            samples.append(s)
    else:
        # Multiple space time raster datasets: all must share the temporal type
        for name in strds_names[1:]:
            dataset = tgis.open_old_stds(name, "strds", dbif)
            if dataset.get_temporal_type() != first_strds.get_temporal_type():
                grass.fatal(_("Temporal type of space time raster datasets must be equal\n"
                              "<%(a)s> of type %(type_a)s do not match <%(b)s> of type %(type_b)s"%\
                              {"a":first_strds.get_id(),
                               "type_a":first_strds.get_temporal_type(),
                               "b":dataset.get_id(),
                               "type_b":dataset.get_temporal_type()}))

        # sample all datasets against the first one using "equal" topology
        mapmatrizes = tgis.sample_stds_by_stds_topology(
            "strds", "strds", strds_names, strds_names[0], False, None,
            "equal", False, False)

        for i in range(len(mapmatrizes[0])):
            isvalid = True
            mapname_list = []
            for mapmatrix in mapmatrizes:

                entry = mapmatrix[i]

                if entry["samples"]:
                    sample = entry["samples"][0]
                    name = sample.get_id()
                    if name is None:
                        # a granule without a map invalidates the whole row
                        isvalid = False
                        break
                    else:
                        mapname_list.append(name)

            if isvalid:
                entry = mapmatrizes[0][i]
                map = entry["granule"]

                start, end = map.get_temporal_extent_as_tuple()
                s = Sample(start, end, mapname_list)
                samples.append(s)

    num_samples = len(samples)

    # Get the layer and database connections of the input vector
    vector_db = grass.vector.vector_db(input)

    # We copy the vector table and create the new layers
    if vector_db:
        # Use the first layer to copy the categories from
        layers = "1,"
    else:
        layers = ""
    first = True
    for layer in range(num_samples):
        layer += 1
        # Skip existing layer
        if vector_db and layer in vector_db and \
           vector_db[layer]["layer"] == layer:
            continue
        if first:
            layers += "%i" % (layer)
            first = False
        else:
            layers += ",%i" % (layer)

    vectmap = vector_output

    # We create a new vector map using the categories of the original map
    try:
        grass.run_command("v.category",
                          input=input,
                          layer=layers,
                          output=vectmap,
                          option="transfer",
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(
            _("Unable to create new layers for vector map <%s>") % (vectmap))

    title = _("Observation of space time raster dataset(s) <%s>") % (strds)
    description = _("Observation of space time raster dataset(s) <%s>"
                    " with vector map <%s>") % (strds, input)

    # Create the output space time vector dataset
    out_sp = tgis.open_new_stds(output, "stvds",
                                first_strds.get_temporal_type(),
                                title, description,
                                first_strds.get_semantic_type(), dbif,
                                overwrite)

    dummy = out_sp.get_new_map_instance(None)

    # Sample the space time raster dataset with the vector
    # map at specific layer with v.what.rast
    count = 1
    for sample in samples:
        raster_names = sample.raster_names

        if len(raster_names) != len(column_names):
            grass.fatal(
                _("The number of raster maps in a granule must "
                  "be equal to the number of column names"))

        # Create the columns creation string
        columns_string = ""
        for name, column in zip(raster_names, column_names):
            # The column is by default double precision
            coltype = "DOUBLE PRECISION"
            # Get raster map type
            raster_map = tgis.RasterDataset(name)
            raster_map.load()
            if raster_map.metadata.get_datatype() == "CELL":
                coltype = "INT"

            tmp_string = "%s %s," % (column, coltype)
            columns_string += tmp_string

        # Remove last comma
        columns_string = columns_string[0:len(columns_string) - 1]

        # Try to add a column
        if vector_db and count in vector_db and vector_db[count]["table"]:
            try:
                grass.run_command("v.db.addcolumn",
                                  map=vectmap,
                                  layer=count,
                                  column=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add column %s to vector map <%s> "
                      "with layer %i") % (columns_string, vectmap, count))
        else:
            # Try to add a new table
            grass.message("Add table to layer %i" % (count))
            try:
                grass.run_command("v.db.addtable",
                                  map=vectmap,
                                  layer=count,
                                  columns=columns_string,
                                  overwrite=overwrite)
            except CalledModuleError:
                dbif.close()
                grass.fatal(
                    _("Unable to add table to vector map "
                      "<%s> with layer %i") % (vectmap, count))

        # Call v.what.rast for each raster map
        for name, column in zip(raster_names, column_names):
            try:
                grass.run_command("v.what.rast",
                                  map=vectmap,
                                  layer=count,
                                  raster=name,
                                  column=column,
                                  where=where)
            except CalledModuleError:
                dbif.close()
                grass.fatal(_("Unable to run v.what.rast for vector map <%s> "
                            "with layer %i and raster map <%s>") % \
                            (vectmap, count, str(raster_names)))

        # register the per-granule vector layer in the output stvds
        vect = out_sp.get_new_map_instance(
            dummy.build_id(vectmap, mapset, str(count)))
        vect.load()

        start = sample.start
        end = sample.end

        if out_sp.is_time_absolute():
            vect.set_absolute_time(start, end)
        else:
            vect.set_relative_time(start, end,
                                   first_strds.get_relative_time_unit())

        if vect.is_in_db(dbif):
            vect.update_all(dbif)
        else:
            vect.insert(dbif)

        out_sp.register_map(vect, dbif)
        count += 1

    out_sp.update_from_registered_maps(dbif)
    dbif.close()
Example #42
0
def main():
    """Select polygons of a vector map that intersect the given coordinates.

    Converts boundaries to polygon WKT via a temporary CSV, performs an OGR
    spatial query at the coordinate pair, and either prints the selected
    features (-p flag) or imports them as a new vector map.

    Fix: removed a leftover debug block that dumped the entire temporary
    CSV file to stdout; file handling now uses context managers.
    """
    inmap = options["input"]
    outmap = options["output"]
    coor = options["coor"]
    coor = coor.replace(",", " ")

    global tmp, grass_version

    # setup temporary files
    tmp = grass.tempfile()

    # check for LatLong location
    if grass.locn_is_latlong():
        grass.fatal("Module works only in locations with cartesian coordinate system")

    # check if input file exists
    if not grass.find_file(inmap, element="vector")["file"]:
        grass.fatal(_("<%s> does not exist.") % inmap)

    ## add categories to boundaries
    grass.run_command(
        "v.category",
        input_=inmap,
        option="add",
        type_="boundary",
        output="v_temp_bcats",
        quiet=True,
        stderr=None,
    )

    ## export polygons to CSV + WKT
    tmp1 = tmp + ".csv"
    tmp2 = tmp + "2.csv"
    grass.run_command(
        "v.out.ogr",
        input_="v_temp_bcats",
        output=tmp1,
        format_="CSV",
        type_=("boundary"),
        lco="GEOMETRY=AS_WKT",
        quiet=True,
        stderr=None,
    )

    ## convert LINESTRING WKT to POLYGON WKT (closed boundaries)
    with open(tmp1, "r") as f1, open(tmp2, "w") as f2:
        for line in f1:
            f2.write(
                line.replace("LINESTRING", "POLYGON")
                .replace(" (", " ((")
                .replace(')"', '))"')
            )

    ## open CSV with OGR and get layer name
    f = ogr.Open(tmp2, 0)
    lyr = f.GetLayer(0)
    lyr_name = lyr.GetName()

    ## make spatial query with coordinates
    # NOTE(review): commands are built by string concatenation and run via
    # os.system — unquoted paths/coordinates would break or allow shell
    # injection; consider subprocess.run with an argument list
    coords = "%s %s" % (coor, coor)
    tmp3 = tmp + "_v_temp_select.shp"
    cmd = "ogr2ogr " + " -spat " + coords + " " + tmp3 + " " + tmp2 + " " + lyr_name
    os.system(cmd)

    ## open SHP with OGR and get layer name
    f = ogr.Open(tmp3, 0)
    lyr = f.GetLayer(0)
    lyr_name = lyr.GetName()

    ## print selected objects to stdout or write into vector map
    if flags["p"]:
        cmd = "ogrinfo -al -fields=YES -geom=SUMMARY" + " " + tmp3 + " " + lyr_name
        os.system(cmd)
    else:
        grass.run_command(
            "v.in.ogr",
            input_=tmp3,
            layer=lyr_name,
            output=outmap,
            flags="c",
            quiet=True,
            stderr=None,
        )
Example #43
0
def matchhist(original, target, matched):
    """Histogram-match `original` to `target`, writing the result as `matched`.

    Builds a cumulative distribution function (CDF) over the 0-255 grey
    values of both rasters, derives an r.reclass rules file mapping each
    grey value of `original` to the target grey value with the closest CDF,
    and creates `matched` as a reclass of `original`.

    Fix: the final if/else ran an identical r.reclass in both branches —
    deduplicated so the removal of a stale result and the reclass are
    independent steps; the rules file is now written via a context manager.

    :param original: name of the raster to be matched (pan/intensity)
    :param target: name of the raster whose histogram is matched against
    :param matched: name of the output reclass raster
    :return: the name of the matched raster
    """
    grass.message(_("Histogram matching..."))

    # strip mapset suffixes; work with plain map names
    original = original.split("@")[0]
    target = target.split("@")[0]
    images = [original, target]

    # per-image structured array of (grey value, cdf)
    arrays = {}

    for img in images:
        # count cells per grey value via r.stats
        stats_out = grass.pipe_command("r.stats", flags="cin", input=img, sep=":")
        stats = grass.decode(stats_out.communicate()[0]).split("\n")[:-1]
        stats_dict = dict(s.split(":", 1) for s in stats)
        total_cells = 0  # total non-null cells ("*" is the null bucket)
        for j in stats_dict:
            stats_dict[j] = int(stats_dict[j])
            if j != "*":
                total_cells += stats_dict[j]

        if total_cells < 1:
            grass.fatal(_("Input has no data. Check region settings."))

        # 256-entry structured array: grey value (i4) and its CDF (f4)
        arrays[img] = np.zeros((256,), dtype=("i4,f4"))
        cum_cells = 0  # running total of cells at or below the current grey value

        for n in range(0, 256):
            num_cells = stats_dict.get(str(n), 0)
            cum_cells += num_cells

            # cdf = fraction of cells at or below this grey value
            cdf = float(cum_cells) / float(total_cells)
            arrays[img][n] = (n, cdf)

    # build the reclass rules: each original grey value maps to the target
    # grey value whose CDF is closest
    rules_path = grass.tempfile()
    with open(rules_path, "w") as outfile:
        for i in arrays[original]:
            # distance from this original CDF to every target CDF
            difference_list = []
            for j in arrays[target]:
                difference_list.append(abs(i[1] - j[1]))

            min_difference = min(difference_list)

            for j in arrays[target]:
                # first target grey value achieving the minimum CDF distance
                if j[1] <= i[1] + min_difference and j[1] >= i[1] - min_difference:
                    out_line = "%d = %d\n" % (i[0], j[0])
                    outfile.write(out_line)
                    break

    # remove a stale result if present, then create the reclass
    result = grass.core.find_file(matched, element="cell")
    if result["fullname"]:
        grass.run_command(
            "g.remove", flags="f", quiet=True, type="raster", name=matched
        )
    grass.run_command("r.reclass", input=original, out=matched, rules=rules_path)

    # Cleanup: remove the rules file
    grass.try_remove(rules_path)

    # return reclass of target with histogram that matches original
    return matched
Example #44
0
def main():
    """Simulate rockfall runout with the shadow-angle (cone) method.

    For every starting point a unit-cost surface is grown with r.cost and
    turned into a maximum-reach cone (tan(ang) * distance from the start).
    Cells lying below the cone collect rock counts, velocities and energies,
    which are accumulated over all shots into count / max / mean output
    rasters named after the given prefix.
    """
    # read input options
    r_elevation = options['dem'].split('@')[0]
    start = options['start']
    gfile = grass.find_file(start, element='vector')
    if not gfile['name']:
        # NOTE: the original referenced an undefined name ("infile") here,
        # which raised a NameError instead of the intended fatal message
        grass.fatal(_("Vector map <%s> not found") % start)

    ang = options['ang']  # shadow angle
    red = options['red']  # velocity reduction coefficient
    m = options['m']      # block mass
    num = options['num']  # number of simulated shots
    n = options['n']      # optional explicit buffer distance

    grass.message("Setting variables...")
    prefix = options['prefix']
    rocks = prefix + '_propagation'
    v = prefix + '_vel'
    vMax = v + '_max'
    vMean = v + '_mean'
    e = prefix + '_en'
    eMax = e + '_max'
    eMean = e + '_mean'

    gregion = grass.region()
    PixelWidth = gregion['ewres']

    # buffer distance around the start points: explicit (n) or derived from
    # the number of shots and the cell size
    if n == '':
        n = 1
        d_buff = (float(num) * PixelWidth) / 2
    else:
        n = float(n)
        d_buff = n

    grass.message("Defining starting points...")
    if int(num) == 1:
        grass.run_command('g.copy',
                          vector=start + ',start_points_',
                          quiet=True)
    else:
        # scatter `num` random points in a buffer around the given start
        # point(s) and patch them together with the originals
        grass.run_command('v.buffer',
                          input=start,
                          type='point',
                          output='start_buffer_',
                          distance=d_buff,
                          quiet=True)

        grass.run_command('v.random',
                          input='start_buffer_',
                          npoints=num,
                          output='start_random_',
                          flags='a',
                          quiet=True)

        grass.run_command('v.patch',
                          input=start + ',start_random_',
                          output='start_points_',
                          quiet=True)

    # constant raster (value 1 everywhere) used as unit-cost input for r.cost
    grass.mapcalc('uno=$dem*0+1', dem=r_elevation, quiet=True)
    what = grass.read_command(
        'r.what',
        map=r_elevation,
        points='start_points_',
        null_value=
        "-9999",  # TODO: a better test for points outside the current region is needed
        quiet=True)
    quota = what.split('\n')

    # accumulator for rock counts
    tot = garray.array()
    tot.read(r_elevation)
    tot[...] = (tot * 0.0).astype(float)
    somma = garray.array()

    # velocity accumulators
    velocity = garray.array()
    velMax = garray.array()
    velMean = garray.array()

    # energy accumulators
    energy = garray.array()
    enMax = garray.array()
    enMean = garray.array()
    grass.message("Waiting...")
    for i in xrange(len(quota) - 1):
        grass.message("Shoot number: " + str(i + 1))
        z = float(quota[i].split('||')[1])
        point = quota[i].split('||')[0]
        x = float(point.split('|')[0])
        y = float(point.split('|')[1])
        # cumulative cell distance from the start point (knight's move)
        grass.run_command('r.cost',
                          flags="k",
                          input='uno',
                          output='costo',
                          start_coordinates=str(x) + ',' + str(y),
                          quiet=True,
                          overwrite=True)

        # convert cell distance to meters using the raster resolution
        grass.mapcalc('costo_m=costo*(ewres()+nsres())/2', overwrite=True)

        # A = tan(shadow angle) * distance; C = cone surface elevation;
        # D = height of the cone above the terrain
        grass.mapcalc('A=tan($ang)*costo_m', ang=ang, overwrite=True)
        grass.mapcalc('C=$z-A', z=z, overwrite=True)
        grass.mapcalc('D=C-$dem', dem=r_elevation, overwrite=True)
        # run-out area: cells where the cone is above the DEM
        grass.mapcalc('E=if(D>0,1,null())', overwrite=True)
        # deltaH inside the run-out area
        grass.mapcalc('F=D*E', overwrite=True)

        # velocity: reduced free-fall velocity sqrt(2 * g * deltaH)
        grass.mapcalc('vel = $red*sqrt(2*9.8*F)', red=red, overwrite=True)
        velocity.read('vel')
        velMax[...] = (np.where(velocity > velMax, velocity,
                                velMax)).astype(float)
        velMean[...] = (velocity + velMean).astype(float)

        # rock count: one hit per shot in every reached cell
        grass.mapcalc('somma=if(vel>0,1,0)', overwrite=True)
        somma.read('somma')
        tot[...] = (somma + tot).astype(float)

        # energy: m * g * deltaH, scaled to kJ
        grass.mapcalc('en=$m*9.8*F/1000', m=m, overwrite=True)
        energy.read('en')
        enMax[...] = (np.where(energy > enMax, energy, enMax)).astype(float)
        enMean[...] = (energy + enMean).astype(float)
    grass.message("Create output maps...")
    tot.write(rocks)
    velMax.write(vMax)
    # divide by the number of shots, i + 1 (i is the last 0-based index);
    # the original divided by i, which was off by one and divided by zero
    # for a single shot
    velMean[...] = (velMean / (i + 1)).astype(float)
    velMean.write(vMean)
    enMax.write(eMax)
    enMean[...] = (enMean / (i + 1)).astype(float)
    enMean.write(eMean)
    if int(num) == 1:
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=('start_points_'),
                          quiet=True)
    else:
        grass.run_command('g.rename',
                          vect='start_points_,' + prefix + '_starting',
                          quiet=True)
        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=('start_buffer_', 'start_random_'),
                          quiet=True)
    grass.run_command('g.remove',
                      flags='f',
                      type='raster',
                      name=('uno', 'costo', 'costo_m', 'A', 'C', 'D', 'E', 'F',
                            'en', 'vel', 'somma'),
                      quiet=True)
    grass.message("Done!")
Example #45
0
def _run_channel_commands(module, kwargs_list, sproc):
    """Run *module* once per keyword-set in *kwargs_list*.

    Serial execution (grass.run_command) when *sproc* is true, otherwise
    all invocations are started concurrently and awaited.
    """
    if sproc:
        for kwargs in kwargs_list:
            grass.run_command(module, **kwargs)
    else:
        procs = [grass.start_command(module, **kwargs) for kwargs in kwargs_list]
        for proc in procs:
            proc.wait()


def main():
    """Pan-sharpen three multispectral channels with a high-res pan channel.

    Reads channel names, method and flags from the module options, converts
    all channels to 8 bit, runs the selected sharpening algorithm at pan
    resolution and writes <output>_red/_green/_blue with equalized grey
    color tables.
    """
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options["method"]  # sharpening algorithm
    ms1_orig = options["blue"]  # blue channel
    ms2_orig = options["green"]  # green channel
    ms3_orig = options["red"]  # red channel
    pan_orig = options["pan"]  # high res pan channel
    out = options["output"]  # prefix for output RGB maps
    bits = options["bitdepth"]  # bit depth of image channels
    bladjust = flags["l"]  # adjust blue channel
    sproc = flags["s"]  # serial processing
    rescale = flags["r"]  # rescale to spread pixel values to entire 0-255 range

    # Checking bit depth
    bits = float(bits)
    if bits < 2 or bits > 30:
        grass.warning(_("Bit depth is outside acceptable range"))
        return

    # refuse to clobber existing output maps unless --overwrite was given
    outb = grass.core.find_file("%s_blue" % out)
    outg = grass.core.find_file("%s_green" % out)
    outr = grass.core.find_file("%s_red" % out)

    if (
        outb["name"] != "" or outg["name"] != "" or outr["name"] != ""
    ) and not grass.overwrite():
        grass.warning(
            _(
                "Maps with selected output prefix names already exist."
                " Delete them or use overwrite flag"
            )
        )
        return

    pid = str(os.getpid())

    # temporary 8-bit working copies of the input channels
    ms1 = "tmp%s_ms1" % pid
    ms2 = "tmp%s_ms2" % pid
    ms3 = "tmp%s_ms3" % pid
    pan = "tmp%s_pan" % pid

    # (source, destination) pairs processed identically below
    channel_pairs = [
        (ms1_orig, ms1),
        (ms2_orig, ms2),
        (ms3_orig, ms3),
        (pan_orig, pan),
    ]

    if not rescale:
        if bits == 8:
            # already 8 bit: plain copies are sufficient
            grass.message(_("Using 8bit image channels"))
            _run_channel_commands(
                "g.copy",
                [
                    {
                        "raster": "%s,%s" % (src, dst),
                        "quiet": True,
                        "overwrite": True,
                    }
                    for src, dst in channel_pairs
                ],
                sproc,
            )
        else:
            # map the full theoretical range of the stated bit depth to 0-255
            grass.message(_("Converting image chanels to 8bit for processing"))
            maxval = pow(2, bits) - 1
            _run_channel_commands(
                "r.rescale",
                [
                    {
                        "input": src,
                        "from_": "0,%f" % maxval,
                        "output": dst,
                        "to": "0,255",
                        "quiet": True,
                        "overwrite": True,
                    }
                    for src, dst in channel_pairs
                ],
                sproc,
            )
    else:
        # -r flag: stretch each channel's actual min/max to the full 0-255 range
        grass.message(_("Rescaling image chanels to 8bit for processing"))
        rescale_args = []
        for src, dst in channel_pairs:
            info = grass.raster_info(src)
            rescale_args.append(
                {
                    "input": src,
                    "from_": "%f,%f" % (int(info["min"]), int(info["max"])),
                    "output": dst,
                    "to": "0,255",
                    "quiet": True,
                    "overwrite": True,
                }
            )
        _run_channel_commands("r.rescale", rescale_args, sproc)

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv["nsres"]
    ewres = kv["ewres"]
    panres = (nsres + ewres) / 2

    # clone current region and switch to pan resolution
    grass.use_temp_region()
    grass.run_command("g.region", res=panres, align=pan)

    # Select sharpening method
    grass.message(_("Performing pan sharpening with hi res pan image: %f" % panres))
    if sharpen == "brovey":
        brovey(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "ihs":
        ihs(pan, ms1, ms2, ms3, out, pid, sproc)
    elif sharpen == "pca":
        pca(pan, ms1, ms2, ms3, out, pid, sproc)
    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(_("Assigning grey equalized color tables to output images..."))

    # equalized grey scales give best contrast
    grass.message(_("setting pan-sharpened channels to equalized grey scale"))
    for ch in ["red", "green", "blue"]:
        grass.run_command(
            "r.colors", quiet=True, map="%s_%s" % (out, ch), flags="e", color="grey"
        )

    # Landsat too blue-ish because panchromatic band less sensitive to blue
    # light, so output blue channed can be modified
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        blue_colors = ["0 0 0 0\n5% 0 0 0\n67% 255 255 255\n100% 255 255 255"]
        # these previous colors are way too blue for landsat
        # blue_colors = ['0 0 0 0\n10% 0 0 0\n20% 200 200 200\n40% 230 230 230\n67% 255 255 255\n100% 255 255 255']
        bc = grass.feed_command("r.colors", quiet=True, map="%s_blue" % out, rules="-")
        bc.stdin.write(grass.encode("\n".join(blue_colors)))
        bc.stdin.close()

    # output notice
    grass.verbose(_("The following pan-sharpened output maps have been generated:"))
    for ch in ["red", "green", "blue"]:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(_("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(
        _("If desired, combine channels into a single RGB map with 'r.composite'.")
    )
    grass.verbose(_("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ["red", "green", "blue"]:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three outputs
    # grass.run_command('i.group', group=out,
    #                  input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.message(_("cleaning up temp files"))
    try:
        grass.run_command(
            "g.remove", flags="f", type="raster", pattern="tmp%s*" % pid, quiet=True
        )
    except Exception:
        # best-effort cleanup; failure to remove temp maps is not fatal
        pass
Example #46
0
def pca(pan, ms1, ms2, ms3, out, pid, sproc):
    """Pan-sharpen via PCA / inverse PCA substitution.

    Runs i.pca on the three multispectral channels, histogram-matches the
    pan channel to each band, then reconstructs RGB with the matched pan
    substituted for principal component 1.  Writes <out>_red/_green/_blue.
    Serial mapcalc when *sproc* is true, otherwise three parallel mapcalcs.
    """
    grass.verbose(_("Using PCA/inverse PCA algorithm"))
    grass.message(_("Creating PCA images and calculating eigenvectors..."))

    # initial PCA with RGB channels
    pca_out = grass.read_command(
        "i.pca",
        quiet=True,
        rescale="0,0",
        input="%s,%s,%s" % (ms1, ms2, ms3),
        output="tmp%s.pca" % pid,
    )
    if len(pca_out) < 1:
        grass.fatal(_("Input has no data. Check region settings."))

    # each i.pca output line holds one component's eigenvector in
    # parentheses; collect the per-band components column-wise
    b1evect = []
    b2evect = []
    b3evect = []
    for line in pca_out.replace("(", ",").replace(")", ",").splitlines():
        fields = line.split(",")
        b1evect.append(float(fields[1]))
        b2evect.append(float(fields[2]))
        b3evect.append(float(fields[3]))

    # inverse PCA with hi res pan channel substituted for principal component 1
    pca2 = "tmp%s.pca.2" % pid
    pca3 = "tmp%s.pca.3" % pid
    b1evect1, b1evect2, b1evect3 = b1evect[0], b1evect[1], b1evect[2]
    b2evect1, b2evect2, b2evect3 = b2evect[0], b2evect[1], b2evect[2]
    b3evect1, b3evect2, b3evect3 = b3evect[0], b3evect[1], b3evect[2]

    # Histogram matching: match pan to each channel's histogram
    panmatch1 = matchhist(pan, ms1, "tmp%s_pan1" % pid)
    panmatch2 = matchhist(pan, ms2, "tmp%s_pan2" % pid)
    panmatch3 = matchhist(pan, ms3, "tmp%s_pan3" % pid)

    grass.message(_("Performing inverse PCA ..."))

    # Get mean value of each channel
    stats1 = grass.parse_command(
        "r.univar", map=ms1, flags="g", parse=(grass.parse_key_val, {"sep": "="})
    )
    stats2 = grass.parse_command(
        "r.univar", map=ms2, flags="g", parse=(grass.parse_key_val, {"sep": "="})
    )
    stats3 = grass.parse_command(
        "r.univar", map=ms3, flags="g", parse=(grass.parse_key_val, {"sep": "="})
    )

    b1mean = float(stats1["mean"])
    b2mean = float(stats2["mean"])
    b3mean = float(stats3["mean"])

    if sproc:
        # serial processing
        outr = "%s_red" % out
        outg = "%s_green" % out
        outb = "%s_blue" % out

        cmd1 = "$outb = 1 * round(($panmatch1 * $b1evect1) + ($pca2 * $b1evect2) + ($pca3 * $b1evect3) + $b1mean)"
        cmd2 = "$outg = 1 * round(($panmatch2 * $b2evect1) + ($pca2 * $b2evect2) + ($pca3 * $b2evect3) + $b2mean)"
        cmd3 = "$outr = 1 * round(($panmatch3 * $b3evect1) + ($pca2 * $b3evect2) + ($pca3 * $b3evect3) + $b3mean)"

        cmd = "\n".join([cmd1, cmd2, cmd3])

        grass.mapcalc(
            cmd,
            outb=outb,
            outg=outg,
            outr=outr,
            panmatch1=panmatch1,
            panmatch2=panmatch2,
            panmatch3=panmatch3,
            pca2=pca2,
            pca3=pca3,
            b1evect1=b1evect1,
            b2evect1=b2evect1,
            b3evect1=b3evect1,
            b1evect2=b1evect2,
            b2evect2=b2evect2,
            b3evect2=b3evect2,
            b1evect3=b1evect3,
            b2evect3=b2evect3,
            b3evect3=b3evect3,
            b1mean=b1mean,
            b2mean=b2mean,
            b3mean=b3mean,
            overwrite=True,
        )
    else:
        # parallel processing: one mapcalc per output channel
        pb = grass.mapcalc_start(
            "%s_blue = 1 * round((%s * %f) + (%s * %f) + (%s * %f) + %f)"
            % (out, panmatch1, b1evect1, pca2, b1evect2, pca3, b1evect3, b1mean),
            overwrite=True,
        )

        pg = grass.mapcalc_start(
            "%s_green = 1 * round((%s * %f) + (%s * %f) + (%s * %f) + %f)"
            % (out, panmatch2, b2evect1, pca2, b2evect2, pca3, b2evect3, b2mean),
            overwrite=True,
        )

        pr = grass.mapcalc_start(
            "%s_red = 1 * round((%s * %f) + (%s * %f) + (%s * %f) + %f)"
            % (out, panmatch3, b3evect1, pca2, b3evect2, pca3, b3evect3, b3mean),
            overwrite=True,
        )

        pb.wait()
        pg.wait()
        pr.wait()
        try:
            pb.terminate()
            pg.terminate()
            pr.terminate()
        except Exception:
            # processes have already exited; terminating them is best-effort
            pass

    # Cleanup of the temporary histogram-matched pan maps
    grass.run_command(
        "g.remove",
        flags="f",
        quiet=True,
        type="raster",
        name="%s,%s,%s" % (panmatch1, panmatch2, panmatch3),
    )
Example #47
0
def reclass(inf, outf, lim, clump, diag, les):
    """Reclass clumps of raster *inf* by area threshold into *outf*.

    Keeps areas <= *lim* hectares when *les* is true, otherwise areas
    >= *lim*.  *clump* means the input is already clumped; *diag* clumps
    including diagonal neighbors.  Requires a projected location so cell
    areas are in meters.
    """
    infile = inf
    outfile = outf
    lesser = les
    limit = lim
    clumped = clump
    diagonal = diag

    s = grass.read_command("g.region", flags='p')
    s = decode(s)
    kv = grass.parse_key_val(s, sep=':')
    # projection code 0 denotes an unprojected x,y location; hectare sizes
    # need projected data with grid units in meters.  NOTE: the original
    # compared the whole token list to the string '0', which was always
    # False, so this guard never fired.
    proj_info = kv['projection'].strip().split()
    if proj_info[0] == '0':
        grass.fatal(_("xy-locations are not supported"))

    if not grass.find_file(infile)['name']:
        grass.fatal(_("Raster map <%s> not found") % infile)

    if clumped and diagonal:
        grass.fatal(_("flags c and d are mutually exclusive"))

    if clumped:
        clumpfile = infile
    else:
        clumpfile = "%s.clump.%s" % (infile.split('@')[0], outfile)
        TMPRAST.append(clumpfile)

        if not grass.overwrite():
            if grass.find_file(clumpfile)['name']:
                grass.fatal(_("Temporary raster map <%s> exists") % clumpfile)
        if diagonal:
            grass.message(
                _("Generating a clumped raster file including "
                  "diagonal neighbors..."))
            grass.run_command('r.clump',
                              flags='d',
                              input=infile,
                              output=clumpfile)
        else:
            grass.message(_("Generating a clumped raster file ..."))
            grass.run_command('r.clump', input=infile, output=clumpfile)

    if lesser:
        grass.message(
            _("Generating a reclass map with area size less than "
              "or equal to %f hectares...") % limit)
    else:
        grass.message(
            _("Generating a reclass map with area size greater "
              "than or equal to %f hectares...") % limit)

    recfile = outfile + '.recl'
    TMPRAST.append(recfile)

    # -i (integer stats) is only added for floating point inputs
    sflags = 'aln'
    if grass.raster_info(infile)['datatype'] in ('FCELL', 'DCELL'):
        sflags += 'i'
    # stream r.stats output straight into r.reclass rules
    p1 = grass.pipe_command('r.stats',
                            flags=sflags,
                            input=(clumpfile, infile),
                            sep=';')
    p2 = grass.feed_command('r.reclass',
                            input=clumpfile,
                            output=recfile,
                            rules='-')
    rules = ''
    for line in p1.stdout:
        # fields: clump cat; input cat; label; ...; area in square meters
        f = decode(line).rstrip(os.linesep).split(';')
        if len(f) < 5:
            continue
        hectares = float(f[4]) * 0.0001
        if lesser:
            test = hectares <= limit
        else:
            test = hectares >= limit
        if test:
            rules += "%s = %s %s\n" % (f[0], f[2], f[3])
    if rules:
        p2.stdin.write(encode(rules))
    p1.wait()
    p2.stdin.close()
    p2.wait()
    if p2.returncode != 0:
        if lesser:
            grass.fatal(
                _("No areas of size less than or equal to %f "
                  "hectares found.") % limit)
        else:
            grass.fatal(
                _("No areas of size greater than or equal to %f "
                  "hectares found.") % limit)
    grass.mapcalc("$outfile = $recfile", outfile=outfile, recfile=recfile)
Example #48
0
def main():
    """Import an SRTM .hgt (or .hgt.zip) tile into a LatLong location.

    Unzips the tile if needed, writes a BIL header/projection file in a
    temporary directory and imports the result with r.in.gdal.
    """
    global tile, tmpdir, in_temp

    in_temp = False

    input = options['input']
    output = options['output']
    one = flags['1']

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)
    if kv['+proj'] != 'longlat':
        grass.fatal(_("This module only operates in LatLong locations"))

    # use these from now on: strip any .hgt/.zip suffixes to get the tile name
    infile = input
    while infile[-4:].lower() in ['.hgt', '.zip']:
        infile = infile[:-4]
    (fdir, tile) = os.path.split(infile)

    if not output:
        tileout = tile
    else:
        tileout = output

    zipfile = infile + ".hgt.zip"
    hgtfile = os.path.join(fdir, tile[:7] + ".hgt")
    if os.path.isfile(zipfile):
        #### check if we have unzip
        if not grass.find_program('unzip'):
            grass.fatal(
                _('The "unzip" program is required, please install it first'))

        # really a ZIP file?
        # make it quiet in a safe way (just in case -qq isn't portable)
        tenv = os.environ.copy()
        tenv['UNZIP'] = '-qq'
        if grass.call(['unzip', '-t', zipfile], env=tenv) != 0:
            grass.fatal(
                _("'%s' does not appear to be a valid zip file.") % zipfile)

        is_zip = True
    elif os.path.isfile(hgtfile):
        # try and see if it's already unzipped
        is_zip = False
    else:
        grass.fatal(_("File '%s' or '%s' not found") % (zipfile, hgtfile))

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)

    if is_zip:
        shutil.copyfile(zipfile, os.path.join(tmpdir, tile + ".hgt.zip"))
    else:
        shutil.copyfile(hgtfile, os.path.join(tmpdir, tile + ".hgt"))

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True

    zipfile = tile + ".hgt.zip"
    hgtfile = tile[:7] + ".hgt"
    bilfile = tile + ".bil"

    if is_zip:
        # unzip & rename data file:
        grass.message(_("Extracting '%s'...") % infile)
        if grass.call(['unzip', zipfile], env=tenv) != 0:
            grass.fatal(_("Unable to unzip file."))

    grass.message(_("Converting input file to BIL..."))
    os.rename(hgtfile, bilfile)

    # the tile name encodes the lower-left corner, e.g. N51E013
    north = tile[0]
    ll_latitude = int(tile[1:3])
    east = tile[3]
    ll_longitude = int(tile[4:7])

    # are we on the southern hemisphere? If yes, make LATITUDE negative.
    if north == "S":
        ll_latitude *= -1

    # are we west of Greenwich? If yes, make LONGITUDE negative.
    if east == "W":
        ll_longitude *= -1

    # Calculate Upper Left from Lower Left
    ulxmap = "%.1f" % ll_longitude
    # SRTM90 tile size is 1 deg:
    ulymap = "%.1f" % (ll_latitude + 1)

    if not one:
        tmpl = tmpl3sec
    else:
        grass.message(_("Attempting to import 1-arcsec data."))
        tmpl = tmpl1sec

    # write the BIL header; open() replaces the file() builtin (removed in
    # Python 3) and the context manager guarantees the handle is closed
    header = tmpl % (ulxmap, ulymap)
    hdrfile = tile + '.hdr'
    with open(hdrfile, 'w') as outf:
        outf.write(header)

    # create prj file: To be precise, we would need EGS96! But who really cares...
    prjfile = tile + '.prj'
    with open(prjfile, 'w') as outf:
        outf.write(proj)

    if grass.run_command('r.in.gdal', input=bilfile, out=tileout) != 0:
        grass.fatal(_("Unable to import data"))

    # nice color table
    grass.run_command('r.colors', map=tileout, color='srtm')

    # write cmd history:
    grass.raster_history(tileout)

    grass.message(_("Done: generated map ") + tileout)
    grass.message(
        _("(Note: Holes in the data can be closed with 'r.fillnulls' using splines)"
          ))
Example #49
0
def main():
    """Print attribute data of all vector maps registered in an STVDS.

    For every registered map, runs v.db.select and prints each row prefixed
    with the map's start/end time; the header line is re-printed whenever
    the column set changes.
    """
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    stvds_name = options["input"]
    where = options["where"]
    columns = options["columns"]
    tempwhere = options["t_where"]
    layer = options["layer"]
    separator = grass.separator(options["separator"])

    # treat blank-ish option values as "not given"
    if where in ("", " ", "\n"):
        where = None

    if columns in ("", " ", "\n"):
        columns = None

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(stvds_name, "stvds")

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", None)

    col_names = ""
    if rows:
        for row in rows:
            vector_name = "%s@%s" % (row["name"], row["mapset"])
            # In case a layer is defined in the vector dataset,
            # we override the option layer
            if row["layer"]:
                layer = row["layer"]

            select = grass.read_command("v.db.select",
                                        map=vector_name,
                                        layer=layer,
                                        columns=columns,
                                        separator="%s" % (separator),
                                        where=where)

            if not select:
                grass.fatal(
                    _("Unable to run v.db.select for vector map <%s> "
                      "with layer %s") % (vector_name, layer))
            # The first line holds the column names; `entries` avoids
            # shadowing the builtin `list`
            entries = select.split("\n")
            count = 0
            for entry in entries:
                if entry.strip() != "":
                    # print the column names in case they change
                    if count == 0:
                        col_names_new = "start_time%send_time%s%s" % (
                            separator, separator, entry)
                        if col_names != col_names_new:
                            col_names = col_names_new
                            print(col_names)
                    else:
                        if row["end_time"]:
                            print("%s%s%s%s%s" %
                                  (row["start_time"], separator,
                                   row["end_time"], separator, entry))
                        else:
                            print("%s%s%s%s" % (row["start_time"], separator,
                                                separator, entry))
                    count += 1
Example #50
0
(C) 2012 by the GRASS Development Team

This program is free software under the GNU General Public License
(>=v2). Read the file COPYING that comes with GRASS for details.

@author Stepan Turek <stepan.turek seznam.cz> (Mentor: Martin Landa)
"""

import os
import grass.script as grass

try:
    from osgeo import gdal
    from osgeo import gdalconst
except:
    grass.fatal(_("Unable to load GDAL Python bindings"))

import xml.etree.ElementTree as etree

from wms_base import WMSBase


class NullDevice():
    """Write-only sink that silently discards anything written to it."""

    def write(self, s):
        # Intentionally drop the payload (same effect as writing to os.devnull).
        return None


class WMSGdalDrv(WMSBase):
    def _createXML(self):
        """!Create XML for GDAL WMS driver
        
Example #51
0
def main():
    """Update values of an attribute column of a vector map (v.db.update).

    Validates the map, its table connection and the target column, then
    builds a single SQL UPDATE statement and pipes it to db.execute.
    Returns 0 on success; grass.fatal() aborts on any error.
    """
    map_name = options['map']
    layer = options['layer']
    column = options['column']
    value = options['value']
    qcolumn = options['query_column']
    where = options['where']
    sqlitefile = options['sqliteextra']

    mapset = grass.gisenv()['MAPSET']

    # refuse to modify maps outside the current mapset
    if not grass.find_file(map_name, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map_name)

    try:
        dbinfo = grass.vector_db(map_name)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))

    table = dbinfo['table']
    database = dbinfo['database']
    driver = dbinfo['driver']

    # extra SQLite functions are only meaningful with the SQLite driver
    if sqlitefile and driver != "sqlite":
        grass.fatal(_("Use of libsqlitefunctions only with SQLite backend"))
    if driver == "sqlite" and sqlitefile and not os.access(sqlitefile, os.R_OK):
        grass.fatal(_("File <%s> not found") % sqlitefile)

    # verify the target column exists and remember its SQL type
    try:
        coltype = grass.vector_columns(map_name, layer)[column]['type']
    except KeyError:
        grass.fatal(_('Column <%s> not found') % column)

    if qcolumn:
        if value:
            grass.fatal(_('<value> and <qcolumn> are mutually exclusive'))
        # special case: copy values from another column (use name verbatim)
        value = qcolumn
    elif not value:
        grass.fatal(_('Either <value> or <qcolumn> must be given'))
    elif coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
        # literal values require SQL quoting for non-numeric columns
        value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    # SQLite: preload extra functions from extension lib if provided by user
    if sqlitefile:
        cmd = "SELECT load_extension('%s');\n" % sqlitefile + cmd

    grass.verbose("SQL: \"%s\"" % cmd)
    grass.write_command('db.execute',
                        input='-',
                        database=database,
                        driver=driver,
                        stdin=cmd)

    # record this command in the vector map history
    grass.vector_history(map_name)

    return 0
Example #52
0
def main():
    """Query and download Copernicus Sentinel products.

    Credentials are read interactively (settings=-) or from a settings
    file (user on line 1, password on line 2, optional API URL on line 3).
    Products are then filtered by AOI, product type, clouds and dates,
    or selected directly by UUID.

    @return 0 on success; gs.fatal() aborts on error
    """
    user = password = None
    api_url = 'https://scihub.copernicus.eu/dhus'

    if options['settings'] == '-':
        # stdin: prompt for credentials interactively
        # (reconstructed from a corrupted line that had the three prompts
        # fused together with masked-out password text)
        import getpass
        user = raw_input(_('Insert username: '))
        password = getpass.getpass(_('Insert password: '))
        url = raw_input(_('Insert API URL (leave empty for {}): ').format(api_url))
        if url:
            api_url = url
    else:
        try:
            with open(options['settings'], 'r') as fd:
                lines = list(filter(None,
                                    (line.rstrip()
                                     for line in fd)))  # non-blank lines only
                if len(lines) < 2:
                    gs.fatal(_("Invalid settings file"))
                user = lines[0].strip()
                password = lines[1].strip()
                if len(lines) > 2:
                    api_url = lines[2].strip()
        except IOError as e:
            gs.fatal(_("Unable to open settings file: {}").format(e))

    if user is None or password is None:
        gs.fatal(_("No user or password given"))

    map_box = get_aoi_box(options['map'])

    sortby = options['sort'].split(',')
    # Sentinel-1 products (SLC/GRD/OCN) carry no cloud-cover metadata
    if options['producttype'] in ('SLC', 'GRD', 'OCN'):
        if options['clouds']:
            gs.info("Option <{}> ignored: cloud cover percentage "
                    "is not defined for product type {}".format(
                        "clouds", options['producttype']))
            options['clouds'] = None
        try:
            sortby.remove('cloudcoverpercentage')
        except ValueError:
            pass
    try:
        downloader = SentinelDownloader(user, password, api_url)

        if options['uuid']:
            downloader.set_uuid(options['uuid'].split(','))
        else:
            query = {}
            if options['query']:
                for item in options['query'].split(','):
                    k, v = item.split('=')
                    query[k] = v
            downloader.filter(area=map_box,
                              area_relation=options['area_relation'],
                              clouds=options['clouds'],
                              producttype=options['producttype'],
                              limit=options['limit'],
                              query=query,
                              start=options['start'],
                              end=options['end'],
                              sortby=sortby,
                              asc=options['order'] == 'asc')
    except Exception as e:  # was StandardError, which no longer exists in Python 3
        gs.fatal(
            _('Unable to connect Copernicus Open Access Hub: {}').format(e))

    if options['footprints']:
        downloader.save_footprints(options['footprints'])

    if flags['l']:
        downloader.list()
        return

    downloader.download(options['output'])

    return 0
Example #53
0
This program is free software under the GNU General Public License
(>=v2). Read the file COPYING that comes with GRASS for details.

@author Stepan Turek <stepan.turek seznam.cz> (Mentor: Martin Landa)
"""

import socket
import grass.script as grass

from time import sleep

try:
    from osgeo import gdal
except:
    grass.fatal(
        _("Unable to load GDAL Python bindings (requires package 'python-gdal' being installed)"
          ))

import numpy as Numeric

Numeric.arrayrange = Numeric.arange

from math import pi, floor

try:
    from urllib2 import HTTPError
    from httplib import HTTPException
except ImportError:
    # python3
    from urllib.error import HTTPError
    from http.client import HTTPException
Example #54
0
    def _download(self):
        """!Downloads data from WMS server using own driver

        Fetches the capabilities document (unless one was supplied via
        the 'capfile' option), then iterates over the tiles provided by
        the chosen request manager: each tile is downloaded, paletted
        tiles are expanded to RGB, and all tiles are merged into a
        single temporary GDAL raster which is finally georeferenced.

        @return temp_map with downloaded data (None if no tile was fetched)
        """
        grass.message(_("Downloading data from WMS server..."))
        # keep the pristine URL for user-facing error messages below
        server_url = self.params["url"]

        # ensure the URL ends with a query-string separator so that
        # request parameters can simply be appended later
        if "?" in self.params["url"]:
            self.params["url"] += "&"
        else:
            self.params["url"] += "?"

        if not self.params["capfile"]:
            self.cap_file = self._fetchCapabilities(self.params)
        else:
            self.cap_file = self.params["capfile"]

        # initialize correct manager according to chosen OGC service
        if self.params["driver"] == "WMTS_GRASS":
            req_mgr = WMTSRequestMgr(self.params, self.bbox, self.region,
                                     self.proj_srs, self.cap_file)
        elif self.params["driver"] == "WMS_GRASS":
            req_mgr = WMSRequestMgr(self.params, self.bbox, self.region,
                                    self.tile_size, self.proj_srs)
        elif self.params["driver"] == "OnEarth_GRASS":
            req_mgr = OnEarthRequestMgr(self.params, self.bbox, self.region,
                                        self.proj_srs, self.cap_file)

        # get information about size in pixels and bounding box of raster, where
        # all tiles will be joined
        map_region = req_mgr.GetMapRegion()

        # init stays True until the merge raster has been created
        init = True
        temp_map = None

        # number of consecutive failed attempts for the current tile
        fetch_try = 0

        # iterate through all tiles and download them
        while True:

            if fetch_try == 0:
                # get url for request the tile and information for placing the tile into
                # raster with other tiles
                tile = req_mgr.GetNextTile()

            # if last tile has been already downloaded
            if not tile:
                break

            # url for request the tile
            query_url = tile[0]

            # the tile size and offset in pixels for placing it into raster where tiles are joined
            tile_ref = tile[1]
            grass.debug(query_url, 2)
            try:
                wms_data = self._fetchDataFromServer(query_url,
                                                     self.params["username"],
                                                     self.params["password"])
            except (IOError, HTTPException) as e:
                # HTTP 401 gets a dedicated authorization-failure message
                if isinstance(e, HTTPError) and e.code == 401:
                    grass.fatal(
                        _("Authorization failed to '%s' when fetching data.\n%s"
                          ) % (self.params["url"], str(e)))
                else:
                    grass.fatal(
                        _("Unable to fetch data from: '%s'\n%s") %
                        (self.params["url"], str(e)))

            temp_tile = self._tempfile()

            # download data into temporary file
            try:
                temp_tile_opened = open(temp_tile, "wb")
                temp_tile_opened.write(wms_data.read())
            except IOError as e:
                # some servers are not happy with many subsequent requests for tiles done immediately,
                # if immediate request was unsuccessful, try to repeat the request after 5s and 30s breaks
                # TODO probably servers can return more kinds of errors related to this
                # problem (not only 104)
                if isinstance(e,
                              socket.error) and e[0] == 104 and fetch_try < 2:
                    fetch_try += 1

                    if fetch_try == 1:
                        sleep_time = 5
                    elif fetch_try == 2:
                        sleep_time = 30

                    grass.warning(
                        _("Server refused to send data for a tile.\nRequest will be repeated after %d s."
                          ) % sleep_time)

                    sleep(sleep_time)
                    continue
                else:
                    grass.fatal(
                        _("Unable to write data into tempfile.\n%s") % str(e))
            finally:
                temp_tile_opened.close()

            # tile written successfully -> reset the retry counter
            fetch_try = 0

            tile_dataset_info = gdal.Open(temp_tile, gdal.GA_ReadOnly)
            if tile_dataset_info is None:
                # print error xml returned from server
                try:
                    error_xml_opened = open(temp_tile, "rb")
                    err_str = error_xml_opened.read()
                except IOError as e:
                    grass.fatal(
                        _("Unable to read data from tempfile.\n%s") % str(e))
                finally:
                    error_xml_opened.close()

                if err_str is not None:
                    grass.fatal(_("WMS server error: %s") % err_str)
                else:
                    grass.fatal(_("WMS server unknown error"))

            temp_tile_pct2rgb = None
            if tile_dataset_info.RasterCount < 1:
                grass.fatal(
                    _("WMS server error: no band(s) received. Is server URL correct? <%s>"
                      ) % server_url)
            if (tile_dataset_info.RasterCount == 1 and
                    tile_dataset_info.GetRasterBand(1).GetRasterColorTable()
                    is not None):
                # expansion of color table into bands
                temp_tile_pct2rgb = self._tempfile()
                tile_dataset = self._pct2rgb(temp_tile, temp_tile_pct2rgb)
            else:
                tile_dataset = tile_dataset_info

            # initialization of temp_map_dataset, where all tiles are merged
            if init:
                temp_map = self._tempfile()

                driver = gdal.GetDriverByName(self.gdal_drv_format)
                metadata = driver.GetMetadata()
                if (gdal.DCAP_CREATE not in metadata
                        or metadata[gdal.DCAP_CREATE] == "NO"):
                    grass.fatal(
                        _("Driver %s does not supports Create() method") %
                        self.gdal_drv_format)
                # band count of the merge raster is taken from the first tile
                self.temp_map_bands_num = tile_dataset.RasterCount
                temp_map_dataset = driver.Create(
                    temp_map,
                    map_region["cols"],
                    map_region["rows"],
                    self.temp_map_bands_num,
                    tile_dataset.GetRasterBand(1).DataType,
                )
                init = False

            # tile is written into temp_map
            tile_to_temp_map = tile_dataset.ReadRaster(
                0,
                0,
                tile_ref["sizeX"],
                tile_ref["sizeY"],
                tile_ref["sizeX"],
                tile_ref["sizeY"],
            )

            temp_map_dataset.WriteRaster(
                tile_ref["t_cols_offset"],
                tile_ref["t_rows_offset"],
                tile_ref["sizeX"],
                tile_ref["sizeY"],
                tile_to_temp_map,
            )

            # release GDAL handles and remove per-tile temporary files
            tile_dataset = None
            tile_dataset_info = None
            grass.try_remove(temp_tile)
            grass.try_remove(temp_tile_pct2rgb)

        if not temp_map:
            return temp_map
        # georeferencing and setting projection of temp_map
        projection = grass.read_command("g.proj",
                                        flags="wf",
                                        epsg=GetEpsg(self.params["srs"]))
        projection = projection.rstrip("\n")
        temp_map_dataset.SetProjection(projection)

        # pixel sizes; the y length (miny - maxy) is negative because
        # raster rows grow downwards from the top edge
        pixel_x_length = (map_region["maxx"] - map_region["minx"]) / int(
            map_region["cols"])
        pixel_y_length = (map_region["miny"] - map_region["maxy"]) / int(
            map_region["rows"])

        geo_transform = [
            map_region["minx"],
            pixel_x_length,
            0.0,
            map_region["maxy"],
            0.0,
            pixel_y_length,
        ]
        temp_map_dataset.SetGeoTransform(geo_transform)
        # closing the dataset flushes the merged raster to disk
        temp_map_dataset = None

        return temp_map
Example #55
0
    def _computeBbox(self):
        """!Get region extent for WMS query (bbox)

        If the WMS query projection equals the location projection, the
        current region extent is used directly. Otherwise the four region
        corner points are reprojected with m.proj and the bbox is built
        from the extreme coordinates of the transformed points.

        @return dict with keys 'minx', 'miny', 'maxx', 'maxy'
        """
        self._debug("_computeBbox", "started")

        bbox_region_items = {
            'maxy': 'n',
            'miny': 's',
            'maxx': 'e',
            'minx': 'w'
        }
        bbox = {}

        if self.proj_srs == self.proj_location:  # TODO: do it better
            for bbox_item, region_item in bbox_region_items.items():
                bbox[bbox_item] = self.region[region_item]

        # if location projection and wms query projection are
        # different, corner points of region are transformed into wms
        # projection and then bbox is created from extreme coordinates
        # of the transformed points
        else:
            for bbox_item, region_item in bbox_region_items.items():
                bbox[bbox_item] = None

            temp_region = self._tempfile()

            try:
                temp_region_opened = open(temp_region, 'w')
                temp_region_opened.write(
                    "%f %f\n%f %f\n%f %f\n%f %f\n" %
                    (self.region['e'], self.region['n'], self.region['w'],
                     self.region['n'], self.region['w'], self.region['s'],
                     self.region['e'], self.region['s']))
            except IOError:
                grass.fatal(_("Unable to write data into tempfile"))
            finally:
                temp_region_opened.close()

            points = grass.read_command('m.proj',
                                        flags='d',
                                        proj_out=self.proj_srs,
                                        proj_in=self.proj_location,
                                        input=temp_region,
                                        quiet=True)  # TODO: stdin
            grass.try_remove(temp_region)
            if not points:
                grass.fatal(
                    _("Unable to determine region, %s failed") % 'm.proj')

            points = points.splitlines()
            if len(points) != 4:
                grass.fatal(_("Region definition: 4 points required"))

            for point in points:
                try:
                    point = list(map(float, point.split("|")))
                except ValueError:
                    grass.fatal(
                        _('Reprojection of region using m.proj failed.'))
                # BUGFIX: use an explicit None test here; the previous
                # truthiness test (`not bbox['maxy']`) re-triggered this
                # initialization whenever a transformed y-coordinate was
                # exactly 0.0, corrupting the extremes
                if bbox['maxy'] is None:
                    # first point initializes all four extremes
                    bbox['maxy'] = point[1]
                    bbox['miny'] = point[1]
                    bbox['maxx'] = point[0]
                    bbox['minx'] = point[0]
                    continue

                if bbox['maxy'] < point[1]:
                    bbox['maxy'] = point[1]
                elif bbox['miny'] > point[1]:
                    bbox['miny'] = point[1]

                if bbox['maxx'] < point[0]:
                    bbox['maxx'] = point[0]
                elif bbox['minx'] > point[0]:
                    bbox['minx'] = point[0]

        self._debug("_computeBbox", "finished -> %s" % bbox)

        # Ordering of coordinates axis of geographic coordinate
        # systems in WMS 1.3.0 is flipped. If  self.tile_size['flip_coords'] is
        # True, coords in bbox need to be flipped in WMS query.

        return bbox
Example #56
0
def main():
    """Print attribute table joined with geometry values of a vector map.

    Reads feature categories (and attributes, when a table is connected
    on the given layer), fetches per-category geometry values via
    'v.to.db -p', joins the two and writes a '|'-separated table to
    stdout, optionally converting the value column to percentages and
    sorting the rows.
    """
    mapname = options['map']
    option = options['option']
    layer = options['layer']
    units = options['units']

    # Python 3 compatibility: the file() builtin was removed, use open()
    nuldev = open(os.devnull, 'w')

    if not grass.find_file(mapname, 'vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % mapname)

    if int(layer) in grass.vector_db(mapname):
        colnames = grass.vector_columns(mapname,
                                        layer,
                                        getDict=False,
                                        stderr=nuldev)
        isConnection = True
    else:
        isConnection = False
        colnames = ['cat']

    # 'coor' yields three extra columns, every other option just one
    if option == 'coor':
        extracolnames = ['x', 'y', 'z']
    else:
        extracolnames = [option]

    if units == 'percent':
        unitsp = 'meters'
    elif units:
        unitsp = units
    else:
        unitsp = None

    # NOTE: we suppress -1 cat and 0 cat
    if isConnection:
        f = grass.vector_db(map=mapname)[int(layer)]
        p = grass.pipe_command('v.db.select',
                               quiet=True,
                               map=mapname,
                               layer=layer)
        records1 = []
        catcol = -1
        for line in p.stdout:
            cols = line.rstrip('\r\n').split('|')
            if catcol == -1:
                # first line is the header: locate the key column index
                for i in range(0, len(cols)):
                    if cols[i] == f['key']:
                        catcol = i
                        break
                if catcol == -1:
                    grass.fatal(
                        _("There is a table connected to input vector map '%s', but "
                          "there is no key column '%s'.") %
                        (mapname, f['key']))
                continue
            if cols[catcol] == '-1' or cols[catcol] == '0':
                continue
            # store cat as int so sorting/joining are numeric
            records1.append(cols[:catcol] + [int(cols[catcol])] +
                            cols[(catcol + 1):])
        p.wait()
        if p.returncode != 0:
            sys.exit(1)

        records1.sort(key=lambda r: r[catcol])

        if len(records1) == 0:
            try:
                grass.fatal(
                    _("There is a table connected to input vector map '%s', but "
                      "there are no categories present in the key column '%s'. Consider using "
                      "v.to.db to correct this.") % (mapname, f['key']))
            except KeyError:
                pass

        # fetch the requested attribute sorted by cat:
        p = grass.pipe_command('v.to.db',
                               flags='p',
                               quiet=True,
                               map=mapname,
                               option=option,
                               layer=layer,
                               units=unitsp)
        records2 = []
        for line in p.stdout:
            fields = line.rstrip('\r\n').split('|')
            if fields[0] in ['cat', '-1', '0']:
                continue
            records2.append([int(fields[0])] + fields[1:])
        p.wait()
        records2.sort()

        # make pre-table
        # len(records1) may not be the same as len(records2) because
        # v.db.select can return attributes that are not linked to features.
        records3 = []
        for r2 in records2:
            # Python 3 compatibility: filter() returns an iterator and
            # cannot be indexed; select the matching record explicitly
            records3.append(
                [r1 for r1 in records1 if r1[catcol] == r2[0]][0] + r2[1:])
    else:
        catcol = 0
        records1 = []
        p = grass.pipe_command('v.category',
                               inp=mapname,
                               layer=layer,
                               option='print')
        for line in p.stdout:
            field = int(line.rstrip())
            if field > 0:
                records1.append(field)
        p.wait()
        records1.sort()
        records1 = uniq(records1)

        # make pre-table
        p = grass.pipe_command('v.to.db',
                               flags='p',
                               quiet=True,
                               map=mapname,
                               option=option,
                               layer=layer,
                               units=unitsp)
        records3 = []
        for line in p.stdout:
            fields = line.rstrip('\r\n').split('|')
            if fields[0] in ['cat', '-1', '0']:
                continue
            records3.append([int(fields[0])] + fields[1:])
        p.wait()
        records3.sort()

    # print table header
    sys.stdout.write('|'.join(colnames + extracolnames) + '\n')

    # make and print the table:
    numcols = len(colnames) + len(extracolnames)

    # calculate percents if requested
    if units == 'percent' and option != 'coor':
        # calculate total value
        total = 0
        for r in records3:
            total += float(r[-1])

        # calculate percentages
        records4 = [float(r[-1]) * 100 / total for r in records3]
        if type(records1[0]) == int:
            records3 = [[r1] + [r4] for r1, r4 in zip(records1, records4)]
        else:
            records3 = [r1 + [r4] for r1, r4 in zip(records1, records4)]

    # sort results on the value column(s) if requested
    if options['sort']:
        if options['sort'] == 'asc':
            if options['option'] == 'coor':
                records3.sort(
                    key=lambda r: (float(r[-3]), float(r[-2]), float(r[-1])))
            else:
                records3.sort(key=lambda r: float(r[-1]))
        else:
            if options['option'] == 'coor':
                records3.sort(key=lambda r:
                              (float(r[-3]), float(r[-2]), float(r[-1])),
                              reverse=True)
            else:
                records3.sort(key=lambda r: float(r[-1]), reverse=True)

    for r in records3:
        sys.stdout.write('|'.join(map(str, r)) + '\n')
Example #57
0
    def _reprojectMap(self):
        """!Warp the downloaded raster into the location projection.

        Runs gdalwarp only when source and target CRS differ; otherwise
        the downloaded temporary raster is reused unchanged.

        @return path to the (possibly reprojected) temporary raster
        """
        # decide whether a warp is necessary at all
        needs_warp = True
        if self.source_epsg is not None and self.target_epsg is not None \
            and self.source_epsg == self.target_epsg:
            needs_warp = False
        # TODO: correctly compare source and target crs
        if needs_warp and self.proj_srs == self.proj_location:
            needs_warp = False

        if not needs_warp:
            # raster projection is same as projection of location
            self.temp_warpmap = self.temp_map
            self.temp_files_to_cleanup.remove(self.temp_map)
            return self.temp_warpmap

        grass.message(_("Reprojecting raster..."))
        self.temp_warpmap = grass.tempfile() + '.tif'

        # silence gdalwarp unless the user asked for verbose output
        if int(os.getenv('GRASS_VERBOSE', '2')) <= 2:
            devnull = open(os.devnull, 'w+')
        else:
            devnull = None

        # translate GRASS resampling names to gdalwarp -r values
        method_map = {"nearest": "near", "linear": "bilinear"}
        gdal_method = method_map.get(self.params['method'],
                                     self.params['method'])

        warp_args = ['gdalwarp', '-s_srs',
                     '%s' % self.proj_srs, '-t_srs',
                     '%s' % self.proj_location, '-r', gdal_method]
        # RGB rasters - alpha layer is added for cropping edges of projected raster
        if self.temp_map_bands_num == 3:
            warp_args.append('-dstalpha')
        warp_args += [self.temp_map, self.temp_warpmap]

        try:
            proc = grass.Popen(warp_args, stdout=devnull)
            proc.wait()
        except OSError as e:
            grass.fatal('%s \nThis can be caused by missing %s utility. ' %
                        (e, 'gdalwarp'))

        if devnull:
            devnull.close()

        if proc.returncode != 0:
            grass.fatal(_('%s failed') % 'gdalwarp')
        grass.try_remove(self.temp_map)

        return self.temp_warpmap
Example #58
0
    def _initializeParameters(self, options, flags):
        """!Parse and validate module options/flags into self.params.

        Sets up connection parameters (url, credentials), image format,
        SRS/projection info, region and optimal tile size. Calls
        grass.fatal() on any invalid input.

        @param options module options dictionary
        @param flags module flags dictionary
        """
        self._debug("_initialize_parameters", "started")

        # initialization of module parameters (options, flags)
        self.params['driver'] = options['driver']
        drv_info = WMSDriversInfo()

        driver_props = drv_info.GetDrvProperties(options['driver'])
        self._checkIgnoeredParams(options, flags, driver_props)

        self.params['capfile'] = options['capfile'].strip()

        for key in ['url', 'layers', 'styles', 'method']:
            self.params[key] = options[key].strip()

        self.flags = flags

        # 'o' flag requests an opaque (non-transparent) map
        if self.flags['o']:
            self.params['transparent'] = 'FALSE'
        else:
            self.params['transparent'] = 'TRUE'

        for key in ['password', 'username', 'urlparams']:
            self.params[key] = options[key]

        # credentials must be supplied as a pair or not at all
        if (self.params['password'] and self.params['username'] == '') or \
           (self.params['password'] == '' and self.params['username']):
            grass.fatal(
                _("Please insert both %s and %s parameters or none of them." %
                  ('password', 'username')))

        self.params['bgcolor'] = options['bgcolor'].strip()

        if options['format'] == "jpeg" and \
           'format' not in driver_props['ignored_params']:
            if not flags['o'] and \
                    'WMS' in self.params['driver']:
                grass.warning(_("JPEG format does not support transparency"))

        self.params['format'] = drv_info.GetFormat(options['format'])
        if not self.params['format']:
            # BUGFIX: this branch previously assigned the (empty) value to
            # itself, leaving 'format' unset; fall back to the raw option
            self.params['format'] = options['format']

        # TODO: get srs from Tile Service file in OnEarth_GRASS driver
        self.params['srs'] = int(options['srs'])
        if self.params['srs'] <= 0 and 'srs' not in driver_props[
                'ignored_params']:
            grass.fatal(_("Invalid EPSG code %d") % self.params['srs'])

        self.params['wms_version'] = options['wms_version']
        # WMS 1.1.1 cannot express CRS-style reference systems
        if "CRS" in GetSRSParamVal(
                self.params['srs']) and self.params['wms_version'] == "1.1.1":
            self.params['wms_version'] = "1.3.0"
            grass.warning(
                _("WMS version <1.3.0> will be used, because version <1.1.1> does not support <%s>projection"
                  ) % GetSRSParamVal(self.params['srs']))

        if self.params['wms_version'] == "1.3.0":
            self.params['proj_name'] = "CRS"
        else:
            self.params['proj_name'] = "SRS"

        # read projection info
        self.proj_location = grass.read_command('g.proj',
                                                flags='jf').rstrip('\n')
        self.proj_location = self._modifyProj(self.proj_location)

        self.source_epsg = str(GetEpsg(self.params['srs']))
        self.target_epsg = None
        target_crs = grass.parse_command('g.proj', flags='g', delimiter='=')
        if 'epsg' in target_crs.keys():
            self.target_epsg = target_crs['epsg']
            if self.source_epsg != self.target_epsg:
                grass.warning(
                    _("SRS differences: WMS source EPSG %s != location EPSG %s (use srs=%s to adjust)"
                      ) %
                    (self.source_epsg, self.target_epsg, self.target_epsg))

        self.proj_srs = grass.read_command('g.proj',
                                           flags='jf',
                                           epsg=str(GetEpsg(
                                               self.params['srs'])))
        self.proj_srs = self.proj_srs.rstrip('\n')

        self.proj_srs = self._modifyProj(self.proj_srs)

        if not self.proj_srs or not self.proj_location:
            grass.fatal(_("Unable to get projection info"))

        self.region = options['region']

        # tiles smaller than this would cause an excessive number of requests
        min_tile_size = 100
        maxcols = int(options['maxcols'])
        if maxcols <= min_tile_size:
            grass.fatal(_("Maxcols must be greater than 100"))

        maxrows = int(options['maxrows'])
        if maxrows <= min_tile_size:
            grass.fatal(_("Maxrows must be greater than 100"))

        # setting optimal tile size according to maxcols and maxrows constraint
        # and region cols and rows
        self.tile_size['cols'] = int(
            self.region['cols'] / ceil(self.region['cols'] / float(maxcols)))
        self.tile_size['rows'] = int(
            self.region['rows'] / ceil(self.region['rows'] / float(maxrows)))

        # default format for GDAL library
        self.gdal_drv_format = "GTiff"

        self._debug("_initialize_parameters", "finished")
Example #59
0
def main():
    """Compute per-object statistics for a segmented raster map.

    Reads the parsed GRASS ``options``/``flags`` globals.  Optionally
    calculates geometry measures (via the r.object.geometry addon),
    univariate raster statistics per segment (via the ``worker`` helper,
    parallelized with a process pool), and neighborhood mean/stddev of all
    collected values (via the r.neighborhoodmatrix addon).  Results are
    written to a CSV file and/or attached to a new vector map.

    Uses the module-level globals ``insert_sql``, ``temporary_vect``,
    ``stats_temp_file`` and ``rasters`` so the cleanup handler can remove
    temporary artifacts.
    """
    # Initialize globals so the cleanup routine can test them safely
    # even if we fail early.
    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None

    segment_map = options['map']
    csvfile = options['csvfile'] if options['csvfile'] else []
    vectormap = options['vectormap'] if options['vectormap'] else []
    global rasters
    rasters = options['rasters'].split(',') if options['rasters'] else []
    # -s suppresses geometry statistics even if area_measures were given
    area_measures = options['area_measures'].split(',') if (
        options['area_measures'] and not flags['s']) else []
    if area_measures:
        if not gscript.find_program('r.object.geometry', '--help'):
            message = _(
                "You need to install the addon r.object.geometry to be able")
            message += _(" to calculate area measures.\n")
            message += _(
                " You can install the addon with 'g.extension r.object.geometry'"
            )
            gscript.fatal(message)
    neighborhood = True if flags['n'] else False
    if neighborhood:
        if not gscript.find_program('r.neighborhoodmatrix', '--help'):
            message = _(
                "You need to install the addon r.neighborhoodmatrix to be able"
            )
            # BUG FIX: the original message said "area measures" here
            # (copy-paste from the block above); this check is about
            # neighborhood statistics.
            message += _(" to calculate neighborhood statistics.\n")
            message += _(
                " You can install the addon with 'g.extension r.neighborhoodmatrix'"
            )
            gscript.fatal(message)

    raster_statistics = options['raster_statistics'].split(
        ',') if options['raster_statistics'] else []
    separator = gscript.separator(options['separator'])
    processes = int(options['processes'])

    output_header = ['cat']
    output_dict = collections.defaultdict(list)

    # Column positions of each statistic in r.univar -t / worker output.
    raster_stat_dict = {
        'zone': 0,
        'min': 4,
        'third_quart': 16,
        'max': 5,
        'sum': 12,
        'null_cells': 3,
        'median': 15,
        'label': 1,
        'first_quart': 14,
        'range': 6,
        'mean_of_abs': 8,
        'stddev': 9,
        'non_null_cells': 2,
        'coeff_var': 11,
        'variance': 10,
        'sum_abs': 13,
        'perc_90': 17,
        'mean': 7
    }

    # Column positions of each measure in r.object.geometry output.
    geometry_stat_dict = {
        'cat': 0,
        'area': 1,
        'perimeter': 2,
        'compact_square': 3,
        'compact_circle': 4,
        'fd': 5,
        'xcoords': 6,
        'ycoords': 7
    }

    if flags['r']:
        # Temporarily align the region to the segment map
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=segment_map)

    stats_temp_file = gscript.tempfile()
    if area_measures:
        gscript.message(_("Calculating geometry statistics..."))
        output_header += area_measures
        stat_indices = [geometry_stat_dict[x] for x in area_measures]
        gscript.run_command('r.object.geometry',
                            input_=segment_map,
                            output=stats_temp_file,
                            overwrite=True,
                            quiet=True)

        firstline = True
        with open(stats_temp_file, 'r') as fin:
            for line in fin:
                if firstline:
                    # Skip the header line of r.object.geometry output
                    firstline = False
                    continue
                values = line.rstrip().split('|')
                # values[0] is the category; keep only requested measures
                output_dict[values[0]] = [values[x] for x in stat_indices]

    if rasters:
        if not flags['c']:
            gscript.message(_("Checking usability of raster maps..."))
            # BUG FIX: iterate over a copy of the list; removing items from
            # the list being iterated silently skips the following element.
            for raster in rasters[:]:
                if not gscript.find_file(raster, element='cell')['name']:
                    gscript.message(_("Cannot find raster '%s'" % raster))
                    gscript.message(_("Removing this raster from list."))
                    rasters.remove(raster)
                    # BUG FIX: don't run r.univar on a map we just
                    # determined does not exist.
                    continue
                raster_info = gscript.parse_command('r.univar',
                                                    flags='g',
                                                    map_=raster,
                                                    quiet=True)
                if len(raster_info) == 0 or int(raster_info['null_cells']) > 0:
                    message = 'Raster %s contains null values.\n' % raster
                    message += 'This can lead to errors in the calculations.\n'
                    message += 'Check region settings and raster extent.\n'
                    message += 'Possibly fill null values of raster.\n'
                    message += 'Removing this raster from list.'
                    gscript.warning(message)
                    while raster in rasters:
                        rasters.remove(raster)
                    continue

        if len(rasters) > 0:
            gscript.message(_("Calculating statistics for raster maps..."))
            if len(rasters) < processes:
                # No benefit in more processes than rasters
                processes = len(rasters)
                gscript.message(
                    _("Only one process per raster. Reduced number of processes to %i."
                      % processes))

            stat_indices = [raster_stat_dict[x] for x in raster_statistics]
            # Fan out one worker per raster; each writes its own temp file
            pool = Pool(processes)
            func = partial(worker, segment_map, stats_temp_file)
            pool.map(func, rasters)
            pool.close()
            pool.join()

            for raster in rasters:
                # Worker output file name is derived from the sanitized
                # raster name (mapset suffix stripped, dots replaced).
                rastername = raster.split('@')[0]
                rastername = rastername.replace('.', '_')
                temp_file = stats_temp_file + '.' + rastername
                output_header += [
                    rastername + "_" + x for x in raster_statistics
                ]
                firstline = True
                with open(temp_file, 'r') as fin:
                    for line in fin:
                        if firstline:
                            firstline = False
                            continue
                        values = line.rstrip().split('|')
                        output_dict[values[0]] = output_dict[values[0]] + [
                            values[x] for x in stat_indices
                        ]

    # Calculating neighborhood statistics if requested
    if neighborhood:

        gscript.message(_("Calculating neighborhood statistics..."))

        # Add neighbordhood statistics to headers
        original_nb_values = len(output_header) - 1
        new_headers = ['neighbors_count']
        for i in range(1, len(output_header)):
            new_headers.append('%s_nbrmean' % output_header[i])
            new_headers.append('%s_nbrstddev' % output_header[i])

        output_header += new_headers

        # Get sorted neighborhood matrix (sorted so groupby sees each
        # category's neighbors as one contiguous run)
        nbr_matrix = sorted([
            x.split('|')
            for x in gscript.read_command('r.neighborhoodmatrix',
                                          input_=segment_map,
                                          flags='d',
                                          quiet=True).splitlines()
        ])

        # Calculate mean and stddev of neighbor values for each variable in the
        # output_dict
        for key, group in groupby(nbr_matrix, lambda x: x[0]):
            # d holds one Welford-style accumulator triple per variable
            d = {}
            for i in range(original_nb_values):
                d[i] = (0, 0, 0)
            nbrlist = [str(x[1]) for x in group]
            if len(nbrlist) > 1:
                for nbr in nbrlist:
                    for i in range(original_nb_values):
                        d[i] = update(d[i], float(output_dict[nbr][i]))
                output_dict[key] = output_dict[key] + [str(len(nbrlist))]
                output_dict[key] = output_dict[key] + [
                    str(i) for sub in [finalize(x) for x in d.values()]
                    for i in sub
                ]
            else:
                # Single neighbor: its values are the mean, stddev is 0
                newvalues = ['1']
                nbr = nbrlist[0]
                for i in range(original_nb_values):
                    newvalues.append(output_dict[nbr][i])
                    newvalues.append('0')
                output_dict[key] = output_dict[key] + newvalues

    message = _("Some values could not be calculated for the objects below. ")
    message += _("These objects are thus not included in the results. ")
    message += _("HINT: Check some of the raster maps for null values ")
    message += _("and possibly fill these values with r.fillnulls.")
    error_objects = []

    if csvfile:
        # BUG FIX: open in text mode ('w'); the original 'wb' raises
        # TypeError on Python 3 because we write str, not bytes.  The
        # redundant f.close() after the with-block is also dropped.
        with open(csvfile, 'w') as f:
            f.write(separator.join(output_header) + "\n")
            for key in output_dict:
                # Only write rows with a complete set of values
                if len(output_dict[key]) + 1 == len(output_header):
                    f.write(key + separator +
                            separator.join(output_dict[key]) + "\n")
                else:
                    error_objects.append(key)

    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = 'segmstat_tmp_vect_%d' % os.getpid()
        gscript.run_command('r.to.vect',
                            input_=segment_map,
                            output=temporary_vect,
                            type_='area',
                            flags='vt',
                            overwrite=True,
                            quiet=True)

        # Build one SQL script and run it in a single transaction
        insert_sql = gscript.tempfile()
        fsql = open(insert_sql, 'w')
        fsql.write('BEGIN TRANSACTION;\n')
        if gscript.db_table_exist(temporary_vect):
            if gscript.overwrite():
                fsql.write('DROP TABLE %s;' % temporary_vect)
            else:
                gscript.fatal(
                    _("Table %s already exists. Use --o to overwrite" %
                      temporary_vect))
        create_statement = 'CREATE TABLE ' + temporary_vect + ' (cat int PRIMARY KEY);\n'
        fsql.write(create_statement)
        for header in output_header[1:]:
            addcol_statement = 'ALTER TABLE %s ADD COLUMN %s double precision;\n' % (
                temporary_vect, header)
            fsql.write(addcol_statement)
        for key in output_dict:
            if len(output_dict[key]) + 1 == len(output_header):
                sql = "INSERT INTO %s VALUES (%s, %s);\n" % (
                    temporary_vect, key, ",".join(output_dict[key]))
                # SQL has no inf/nan literals; store them as NULL
                sql = sql.replace('inf', 'NULL')
                sql = sql.replace('nan', 'NULL')
                fsql.write(sql)
            else:
                if not csvfile:
                    # Avoid double-reporting objects already caught above
                    error_objects.append(key)

        fsql.write('END TRANSACTION;')
        fsql.close()

        gscript.run_command('db.execute', input=insert_sql, quiet=True)
        gscript.run_command('v.db.connect',
                            map_=temporary_vect,
                            table=temporary_vect,
                            quiet=True)
        gscript.run_command('g.copy',
                            vector="%s,%s" % (temporary_vect, vectormap),
                            quiet=True)

    if error_objects:
        object_string = ', '.join(error_objects[:100])
        message += _(
            "\n\nObjects with errors (only first 100 are shown):\n%s" %
            object_string)
        gscript.message(message)
Example #60
0
def raster_exists(envlay):
    """Check that every raster map in the list exists, call GRASS fatal otherwise.

    :param envlay: list of raster map names to check
    """
    # Iterate over the layer names directly instead of the
    # range(len(...)) index anti-pattern.
    for layer in envlay:
        ffile = gs.find_file(layer, element="cell")
        if not ffile["fullname"]:
            gs.fatal(_("The layer {} does not exist".format(layer)))