def ppmtopng(dst, src):
    """Convert the PPM image *src* into the PNG file *dst*.

    Tries the available converters in order of preference:
    g.ppmtopng, then pnmtopng, then ImageMagick's convert.
    Aborts with a fatal error if none of them can be found.
    """
    if grass.find_program("g.ppmtopng", "--help"):
        grass.run_command("g.ppmtopng", input=src, output=dst, quiet=True)
    elif grass.find_program("pnmtopng"):
        # pnmtopng writes the PNG to stdout, so redirect into dst
        with open(dst, "wb") as sink:
            grass.call(["pnmtopng", src], stdout=sink)
    elif grass.find_program("convert"):
        grass.call(["convert", src, dst])
    else:
        grass.fatal(_("Cannot find g.ppmtopng, pnmtopng or convert"))
def ppmtopng(dst, src):
    """Write the PPM file *src* out as the PNG file *dst*.

    The first available converter wins: g.ppmtopng, pnmtopng,
    or ImageMagick's convert; otherwise exit with a fatal error.
    """
    if grass.find_program("g.ppmtopng", '--help'):
        grass.run_command('g.ppmtopng', input=src, output=dst, quiet=True)
        return
    if grass.find_program("pnmtopng"):
        # pnmtopng emits the PNG on stdout; capture it into dst
        fh = open(dst, 'wb')
        grass.call(["pnmtopng", src], stdout=fh)
        fh.close()
        return
    if grass.find_program("convert"):
        grass.call(["convert", src, dst])
        return
    grass.fatal(_("Cannot find g.ppmtopng, pnmtopng or convert"))
def clip_multiple_raster(raster_name_list, output_suffix='clip',
                         overwrite=False, resample=False, n_jobs=2):
    """Clip a collection of rasters in a multiprocessing pool.

    Please be careful that the clip will be based on the region extent
    and pixels under MASK will be null. Take care of well defining the
    computational region and a MASK (if desired) before calling this
    function.

    :param raster_name_list: names of the rasters to clip
    :param output_suffix: suffix appended to each output raster name
    :param overwrite: allow overwriting existing maps
    :param resample: resample instead of plain clip
    :param n_jobs: number of parallel worker processes
    """
    # the worker function 'clip' reads these module-level globals
    global suffix, o, r
    o = overwrite
    r = resample
    suffix = output_suffix
    # Check that the r.clip addon is installed
    if not gscript.find_program('r.clip', '--help'):
        message = _("You first need to install the addon r.clip.\n")
        message += _(" You can install the addon with 'g.extension r.clip'")
        gscript.fatal(message)
    # Clip the rasters in a multiprocessing pool of jobs.
    # 'map' keeps the returned results in input order.
    p = Pool(n_jobs)
    output = p.map(clip, raster_name_list)
    p.close()
    p.join()
    # NOTE: was a Python-2-only print statement; now a function call
    print("\n".join(output))
def check_progs():
    """Abort with a fatal error if the required v.mc.py addon is missing."""
    prog = 'v.mc.py'
    if not grass.find_program(prog):
        grass.warning(_("'%s' required. Please install '%s' first \n using 'g.extension %s' or check \n PATH and GRASS_ADDON_PATH variables") % (prog, prog, prog))
        grass.fatal(_("An ERROR occurred running <v.ldm.py>"))
def main():
    """Worker entry point: run i.sentinel.mask inside a freshly created
    private mapset (own GISRC), copying the input bands in first."""
    # check if we have i.sentinel.mask
    if not grass.find_program('i.sentinel.mask', '--help'):
        grass.fatal(
            _("The 'i.sentinel.mask' module was not found, install it first:")
            + "\n" + "g.extension i.sentinel")

    # set some common environmental variables, like:
    os.environ.update(dict(GRASS_COMPRESS_NULLS='1',
                           GRASS_COMPRESSOR='ZSTD',
                           GRASS_MESSAGE_FORMAT='plain'))

    # actual mapset, location, ...
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    # NOTE(review): old_mapset is read but never used below — confirm
    # whether it is needed or a leftover
    old_mapset = env['MAPSET']

    new_mapset = options['newmapset']
    grass.message("New mapset: <%s>" % new_mapset)
    # remove any stale mapset directory from a previous (failed) run
    grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))

    # create a private GISRC file for each job so parallel workers
    # do not clobber each other's session state
    gisrc = os.environ['GISRC']
    newgisrc = "%s_%s" % (gisrc, str(os.getpid()))
    grass.try_remove(newgisrc)
    shutil.copyfile(gisrc, newgisrc)
    os.environ['GISRC'] = newgisrc

    ### change mapset (-c creates it if missing)
    grass.message("GISRC: <%s>" % os.environ['GISRC'])
    grass.run_command('g.mapset', flags='c', mapset=new_mapset)

    ### import data
    grass.message(_("Running i.sentinel.mask ..."))
    # forward all options except 'newmapset'; band maps are first copied
    # into the new mapset (stripped of their @mapset qualifier)
    kwargs = dict()
    for opt, val in options.items():
        if opt != 'newmapset' and val:
            if opt in ['green', 'red', 'blue', 'nir', 'nir8a', 'swir11',
                       'swir12']:
                valnew = val.split('@')[0]
                grass.run_command('g.copy', raster="%s,%s" % (val, valnew),
                                  quiet=True)
                kwargs[opt] = valnew
            else:
                kwargs[opt] = val
    # forward all set flags as a single flag string
    flagstr = ''
    for flag, val in flags.items():
        if val:
            flagstr += flag
    # align the region to the NIR band before masking
    grass.run_command('g.region', raster=kwargs['nir'])
    grass.run_command('i.sentinel.mask', quiet=True, flags=flagstr, **kwargs)

    # drop the private GISRC again
    grass.utils.try_remove(newgisrc)
    return 0
def copyOfInitMap(map_, width, height):
    """Initialize the map display: verify g.region is available, apply
    the requested canvas dimensions and load the current region."""
    prog = 'g.region'
    if not grass.find_program(prog, '--help'):
        sys.exit(_("GRASS module '%s' not found. Unable to start map "
                   "display window.") % prog)
    map_.ChangeMapSize((width, height))
    # equivalent of: g.region -upgc
    map_.region = map_.GetRegion()
def check_progs():
    """Warn about each missing required addon, then abort if any was missing."""
    required = ('r.neighborhoodmatrix',)
    missing = [p for p in required if not gscript.find_program(p, '--help')]
    for prog in missing:
        gscript.warning(_("'%s' required. Please install '%s' first using 'g.extension %s'") % (prog, prog, prog))
    if missing:
        gscript.fatal(_("An ERROR occurred running i.segment.uspo"))
def check_progs():
    """Verify that every r.stream.* helper module needed by r.basin exists."""
    required = ('r.hypso', 'r.stream.basins', 'r.stream.distance',
                'r.stream.extract', 'r.stream.order', 'r.stream.snap',
                'r.stream.stats', 'r.width.funct')
    missing = False
    for prog in required:
        if grass.find_program(prog, '--help'):
            continue
        missing = True
        grass.warning(_("'%s' required. Please install '%s' first using 'g.extension %s'") % (prog, prog, prog))
    if missing:
        grass.fatal(_("An ERROR occurred running r.basin"))
def check_progs():
    """Abort unless the r.stream.distance addon is installed."""
    prog = "r.stream.distance"
    if grass.find_program(prog, "--help"):
        return
    grass.warning(
        _("'%s' required. Please install '%s' first using 'g.extension %s'")
        % (prog, prog, prog))
    grass.fatal(_("An ERROR occurred running r.lfp"))
def main():
    """Import Terra/ASTER HDF bands (or the DEM) via gdalwarp + GRASS."""
    input = options['input']
    proctype = options['proctype']
    # NOTE(review): 'output' is read but not used in this function body;
    # presumably import_aster derives output names itself — confirm
    output = options['output']
    band = options['band']

    # check whether gdalwarp is in path and executable
    if not grass.find_program('gdalwarp', '--help'):
        grass.fatal(_("gdalwarp is not in the path and executable"))

    # create temporary file to hold gdalwarp output before importing to GRASS
    tempfile = grass.read_command("g.tempfile", pid=os.getpid()).strip() + '.tif'

    # get projection information for current GRASS location
    proj = grass.read_command('g.proj', flags='jf').strip()

    # currently only runs in projected location
    if "XY location" in proj:
        grass.fatal(
            _("This module needs to be run in a projected location (found: %s)"
              ) % proj)

    # process list of bands ('all' expands to every Terra/ASTER band)
    allbands = [
        '1', '2', '3n', '3b', '4', '5', '6', '7', '8', '9', '10', '11', '12',
        '13', '14'
    ]
    if band == 'all':
        bandlist = allbands
    else:
        bandlist = band.split(',')

    # initialize datasets for L1A and L1B
    if proctype in ["L1A", "L1B"]:
        # NOTE: the loop variable shadows the 'band' option read above
        for band in bandlist:
            if band in allbands:
                # 'bands' is a module-level lookup table of HDF subdatasets
                dataset = bands[proctype][band]
                srcfile = "HDF4_EOS:EOS_SWATH:%s:%s" % (input, dataset)
                import_aster(proj, srcfile, tempfile, band)
            else:
                grass.fatal(
                    _('band %s is not an available Terra/ASTER band') % band)
    elif proctype == "DEM":
        # the DEM product is a plain raster, not an HDF swath subdataset
        srcfile = input
        import_aster(proj, srcfile, tempfile, "DEM")

    # cleanup
    grass.message(_("Cleaning up ..."))
    grass.try_remove(tempfile)

    grass.message(_("Done."))

    return
def main():
    """Import Terra/ASTER HDF bands (or the DEM) via gdalwarp + GRASS."""
    input = options['input']
    proctype = options['proctype']
    # NOTE(review): 'output' is read but not used here; presumably
    # import_aster derives the output names itself — confirm
    output = options['output']
    band = options['band']

    # check whether gdalwarp is in path and executable.
    # FIX: find_program(pgm, *args) expects string arguments; the old
    # call passed the list ['--version'] as a single argument, which can
    # never probe the program correctly. Use '--help' as the sibling
    # variants of this module do.
    if not grass.find_program('gdalwarp', '--help'):
        grass.fatal(_("gdalwarp is not in the path and executable"))

    # create temporary file to hold gdalwarp output before importing to GRASS
    tempfile = grass.read_command("g.tempfile", pid=os.getpid()).strip() + '.tif'

    # get projection information for current GRASS location
    proj = grass.read_command('g.proj', flags='jf').strip()

    # currently only runs in projected location
    if "XY location" in proj:
        grass.fatal(
            _("This module needs to be run in a projected location (found: %s)")
            % proj)

    # process list of bands ('all' expands to every Terra/ASTER band)
    allbands = ['1', '2', '3n', '3b', '4', '5', '6', '7', '8', '9', '10',
                '11', '12', '13', '14']
    if band == 'all':
        bandlist = allbands
    else:
        bandlist = band.split(',')

    # initialize datasets for L1A and L1B
    if proctype in ["L1A", "L1B"]:
        for band in bandlist:
            if band in allbands:
                # 'bands' is a module-level lookup table of HDF subdatasets
                dataset = bands[proctype][band]
                srcfile = "HDF4_EOS:EOS_SWATH:%s:%s" % (input, dataset)
                import_aster(proj, srcfile, tempfile, band)
            else:
                grass.fatal(
                    _('band %s is not an available Terra/ASTER band') % band)
    elif proctype == "DEM":
        # the DEM product is a plain raster, not an HDF swath subdataset
        srcfile = input
        import_aster(proj, srcfile, tempfile, "DEM")

    # cleanup
    grass.message(_("Cleaning up ..."))
    grass.try_remove(tempfile)

    grass.message(_("Done."))

    return
def main():
    """Import Terra/ASTER HDF bands (or the DEM) via gdalwarp + GRASS."""
    input = options["input"]
    proctype = options["proctype"]
    output = options["output"]
    band = options["band"]

    # check whether gdalwarp is in path and executable
    if not grass.find_program("gdalwarp", "--help"):
        grass.fatal(_("gdalwarp is not in the path and executable"))

    # create temporary file to hold gdalwarp output before importing to GRASS
    tempfile = grass.read_command("g.tempfile", pid=os.getpid()).strip() + ".tif"

    # get projection information for current GRASS location
    proj = grass.read_command("g.proj", flags="jf").strip()

    # currently only runs in projected location
    if "XY location" in proj:
        grass.fatal(_("This module needs to be run in a projected location (found: %s)") % proj)

    # process list of bands ('all' expands to every Terra/ASTER band)
    allbands = ["1", "2", "3n", "3b", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14"]
    if band == "all":
        bandlist = allbands
    else:
        bandlist = band.split(",")

    # initialize datasets for L1A and L1B
    if proctype in ["L1A", "L1B"]:
        # NOTE: the loop variable shadows the 'band' option read above
        for band in bandlist:
            if band in allbands:
                # 'bands' is a module-level lookup table of HDF subdatasets
                dataset = bands[proctype][band]
                srcfile = "HDF4_EOS:EOS_SWATH:%s:%s" % (input, dataset)
                import_aster(proj, srcfile, tempfile, output, band)
            else:
                grass.fatal(_("band %s is not an available Terra/ASTER band") % band)
    elif proctype == "DEM":
        # the DEM product is a plain raster, not an HDF swath subdataset
        srcfile = input
        import_aster(proj, srcfile, tempfile, output, "DEM")

    # cleanup
    grass.message(_("Cleaning up ..."))
    grass.try_remove(tempfile)

    grass.message(_("Done."))

    return
def sortfile(infile, outfile):
    """Numerically sort the lines of *infile* into *outfile*.

    Delegates to the external 'sort' utility when available; otherwise
    falls back to sorting in memory (FIXME: needs a large-file strategy).

    FIX: the original used the Python-2-only ``file()`` builtin, which
    raises NameError on Python 3, and leaked both handles on error paths;
    use ``open()`` inside ``with`` blocks instead.
    """
    with open(infile, 'r') as inf, open(outfile, 'w') as outf:
        if gscript.find_program('sort', '--help'):
            gscript.run_command('sort', flags='n', stdin=inf, stdout=outf)
        else:
            # FIXME: we need a large-file sorting function
            gscript.warning(_("'sort' not found: sorting in memory"))
            lines = [float(line.rstrip('\r\n')) for line in inf.readlines()]
            lines.sort()
            for line in lines:
                outf.write(str(line) + '\n')
def sortfile(infile, outfile):
    """Sort the lines of *infile* numerically and write them to *outfile*."""
    inf = open(infile, "r")
    outf = open(outfile, "w")
    if gscript.find_program("sort", "--help"):
        # delegate the heavy lifting to the system sort utility
        gscript.run_command("sort", flags="n", stdin=inf, stdout=outf)
    else:
        # FIXME: we need a large-file sorting function
        gscript.warning(_("'sort' not found: sorting in memory"))
        values = sorted(float(ln.rstrip("\r\n")) for ln in inf.readlines())
        for value in values:
            outf.write(str(value) + "\n")
    inf.close()
    outf.close()
def main():
    """Import an SRTM .hgt(.zip) tile into the current LatLong location.

    Unzips (if needed) into a temp dir, synthesizes a BIL header from the
    tile name, imports with r.in.gdal and applies the srtm color table.

    FIX: replaced the Python-2-only ``file()`` builtin (NameError on
    Python 3) with ``open()`` in ``with`` blocks, and narrowed the bare
    ``except:`` around r.in.gdal to ``except Exception``.
    """
    global tile, tmpdir, in_temp

    in_temp = False

    input = options['input']
    output = options['output']
    one = flags['1']

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)
    if kv['+proj'] != 'longlat':
        grass.fatal(_("This module only operates in LatLong locations"))

    # use these from now on:
    infile = input
    while infile[-4:].lower() in ['.hgt', '.zip']:
        infile = infile[:-4]
    (fdir, tile) = os.path.split(infile)

    if not output:
        tileout = tile
    else:
        tileout = output

    zipfile = infile + ".hgt.zip"
    hgtfile = os.path.join(fdir, tile[:7] + ".hgt")
    if os.path.isfile(zipfile):
        # check if we have unzip
        if not grass.find_program('unzip'):
            grass.fatal(_('The "unzip" program is required, please install it first'))

        # really a ZIP file?
        # make it quiet in a safe way (just in case -qq isn't portable)
        tenv = os.environ.copy()
        tenv['UNZIP'] = '-qq'
        if grass.call(['unzip', '-t', zipfile], env=tenv) != 0:
            grass.fatal(_("'%s' does not appear to be a valid zip file.") % zipfile)
        is_zip = True
    elif os.path.isfile(hgtfile):
        # try and see if it's already unzipped
        is_zip = False
    else:
        grass.fatal(_("File '%s' or '%s' not found") % (zipfile, hgtfile))

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    if is_zip:
        shutil.copyfile(zipfile, os.path.join(tmpdir, tile + ".hgt.zip"))
    else:
        shutil.copyfile(hgtfile, os.path.join(tmpdir, tile + ".hgt"))

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True

    zipfile = tile + ".hgt.zip"
    hgtfile = tile[:7] + ".hgt"
    bilfile = tile + ".bil"

    if is_zip:
        # unzip & rename data file:
        grass.message(_("Extracting '%s'...") % infile)
        if grass.call(['unzip', zipfile], env=tenv) != 0:
            grass.fatal(_("Unable to unzip file."))

    grass.message(_("Converting input file to BIL..."))
    os.rename(hgtfile, bilfile)

    # tile names encode the lower-left corner, e.g. N51E013
    north = tile[0]
    ll_latitude = int(tile[1:3])
    east = tile[3]
    ll_longitude = int(tile[4:7])

    # are we on the southern hemisphere? If yes, make LATITUDE negative.
    if north == "S":
        ll_latitude *= -1

    # are we west of Greenwich? If yes, make LONGITUDE negative.
    if east == "W":
        ll_longitude *= -1

    # Calculate Upper Left from Lower Left
    ulxmap = "%.1f" % ll_longitude
    # SRTM90 tile size is 1 deg:
    ulymap = "%.1f" % (ll_latitude + 1)

    if not one:
        tmpl = tmpl3sec
    else:
        grass.message(_("Attempting to import 1-arcsec data."))
        tmpl = tmpl1sec

    header = tmpl % (ulxmap, ulymap)
    hdrfile = tile + '.hdr'
    with open(hdrfile, 'w') as outf:
        outf.write(header)

    # create prj file: To be precise, we would need EGS96! But who really cares...
    prjfile = tile + '.prj'
    with open(prjfile, 'w') as outf:
        outf.write(proj)

    try:
        grass.run_command('r.in.gdal', input=bilfile, out=tileout)
    except Exception:
        grass.fatal(_("Unable to import data"))

    # nice color table
    grass.run_command('r.colors', map=tileout, color='srtm')

    # write cmd history:
    grass.raster_history(tileout)

    grass.message(_("Done: generated map ") + tileout)
    grass.message(_("(Note: Holes in the data can be closed with 'r.fillnulls' using splines)"))
def main():
    """Wrapper around the external 'pdal' tool: either scan a point-cloud
    file's extent (-s/-g), write its footprint to a vector map, or pipe
    pdal's CSV output into r.in.xyz to rasterize it."""
    # parameters
    infile = options['input']
    raster_reference = options['raster_reference']
    raster_file = options['raster_file']
    outfile = options['output']
    resolution = options['resolution']
    method = options['method']
    zrange = options['zrange']
    zscale = options['zscale']
    output_type = options['type']
    percent = options['percent']
    pth = options['pth']
    trim = options['trim']
    footprint = options['footprint']
    # flags
    scan = flags['s']
    shell_script_style = flags['g']

    # force overwriting of existing maps (was: "overwrite auf true setzen")
    os.environ['GRASS_OVERWRITE'] = '1'

    # to hide non-error messages from subprocesses
    if grass.verbosity() <= 2:
        outdev = open(os.devnull, 'w')
    else:
        outdev = sys.stdout

    # use temporary region
    grass.use_temp_region()

    # scan -s or shell_script_style -g:
    if scan:
        # probe pdal (pdal_cmd may be a wrapper, e.g. a docker invocation)
        if not grass.find_program(
                options['pdal_cmd'].split(' ')[0],
                ' '.join(options['pdal_cmd'].split(' ')[1:]) +
                ' info --summary'):
            grass.fatal(_("The pdal program is not in the path " +
                          "and executable. Please install first"))
        command_scan = options['pdal_cmd'].split(' ')
        command_scan.extend(['info', '--summary', infile])
        tmp_scan = grass.tempfile()
        if tmp_scan is None:
            grass.fatal("Unable to create temporary files")
        fh = open(tmp_scan, 'wb')
        summary = True
        # first try 'pdal info --summary'; on failure fall back to
        # plain 'pdal info' (older pdal / unsupported format)
        if grass.call(command_scan, stdout=fh) != 0:
            fh.close()
            command_scan = options['pdal_cmd'].split(' ')
            command_scan.extend(['info', infile])
            fh2 = open(tmp_scan, 'wb')
            if grass.call(command_scan, stdout=fh2) != 0:
                os.remove(tmp_scan)
                grass.fatal(_("pdal cannot determine metadata " +
                              "for unsupported format of <%s>") % infile)
                # NOTE(review): grass.fatal aborts, so this close is
                # presumably unreachable — confirm
                fh2.close()
            else:
                fh2.close()
                summary = False
        else:
            fh.close()
        # NOTE(review): this file handle is never closed explicitly
        data = json.load(open(tmp_scan))
        # extract the bounding box; the JSON layout differs between
        # 'info --summary' and plain 'info', and between pdal versions
        if summary:
            str1 = u'summary'
            str2 = u'bounds'
            y_str = u'Y'
            x_str = u'X'
            z_str = u'Z'
            min_str = u'min'
            max_str = u'max'
            try:
                n = str(data[str1][str2][y_str][max_str])
                s = str(data[str1][str2][y_str][min_str])
                w = str(data[str1][str2][x_str][min_str])
                e = str(data[str1][str2][x_str][max_str])
                t = str(data[str1][str2][z_str][max_str])
                b = str(data[str1][str2][z_str][min_str])
            except:
                # older summary layout: flat minx/maxx/... keys
                ymin_str = u'miny'
                xmin_str = u'minx'
                zmin_str = u'minz'
                ymax_str = u'maxy'
                xmax_str = u'maxx'
                zmax_str = u'maxz'
                n = str(data[str1][str2][ymax_str])
                s = str(data[str1][str2][ymin_str])
                w = str(data[str1][str2][xmin_str])
                e = str(data[str1][str2][xmax_str])
                t = str(data[str1][str2][zmax_str])
                b = str(data[str1][str2][zmin_str])
        else:
            # plain 'pdal info' layout: stats/bbox/native/bbox
            str1 = u'stats'
            str2 = u'bbox'
            str3 = u'native'
            str4 = u'bbox'
            n = str(data[str1][str2][str3][str4][u'maxy'])
            s = str(data[str1][str2][str3][str4][u'miny'])
            w = str(data[str1][str2][str3][str4][u'minx'])
            e = str(data[str1][str2][str3][str4][u'maxx'])
            t = str(data[str1][str2][str3][str4][u'maxz'])
            b = str(data[str1][str2][str3][str4][u'minz'])
        # print either human-readable or shell-script style (-g)
        if not shell_script_style:
            grass.message(_(
                "north: %s\nsouth: %s\nwest: %s\neast: %s\ntop: %s\nbottom: %s"
            ) % (n, s, w, e, t, b))
        else:
            grass.message(_(
                "n=%s s=%s w=%s e=%s t=%s b=%s") % (n, s, w, e, t, b))
    elif footprint:
        footprint_to_vectormap(infile, footprint)
    else:
        # get region with pdal (footprint written to temp map 'tiles')
        footprint_to_vectormap(infile, 'tiles')

        if raster_file:
            # register the raster file as a pseudo GRASS raster to
            # borrow its pixel geometry
            raster_reference = 'img' + str(os.getpid())
            grass.run_command('r.external', input=raster_file,
                              flags='o', output=raster_reference)
            result = grass.find_file(name=raster_reference, element='raster')
            if result[u'fullname'] == u'':
                # r.external created a band-suffixed map
                raster_reference = raster_reference + '.1'
        # option 1: set region to extent of tiles while precisely aligning
        # pixel geometry to raster_reference (including both
        # raster_reference and raster_file)
        if raster_reference:
            grass.run_command('g.region', vector='tiles', flags='g',
                              align=raster_reference)
        else:
            # option 2: change raster resolution to final resolution while
            # best effort aligning to pixel geometry
            grass.run_command('g.region', vector='tiles', flags='ap',
                              res=resolution)

        # generate PDAL pipeline:
        # pdal pipeline laz->csv (STDOUT) | r.in.xyz
        bn = os.path.basename(infile)
        infile_format = bn.split('.')[-1]
        # format_reader from https://pdal.io/stages/readers.html
        format_reader = ''
        if infile_format.lower() == 'laz' or infile_format.lower() == 'las':
            format_reader = 'readers.las'
        # pts: not tested
        elif infile_format.lower() == 'pts':
            format_reader = 'readers.pts'
        else:
            grass.run_command('g.remove', flags='f', type='vector',
                              name='tiles', quiet=True)
            grass.fatal(_("Format .%s is not supported.." % infile_format))
        tmp_file_json = 'tmp_file_json_' + str(os.getpid())

        # pipeline: read the point cloud, write X,Y,Z as CSV to stdout
        data = {}
        data['pipeline'] = []
        data['pipeline'].append({'type': format_reader, 'filename': infile})
        data['pipeline'].append({
            'type': 'writers.text',
            'format': 'csv',
            'order': 'X,Y,Z',
            'keep_unspecified': 'false',
            'filename': 'STDOUT',
            'quote_header': 'false'
        })
        with open(tmp_file_json, 'w') as f:
            json.dump(data, f)

        tmp_xyz = grass.tempfile()
        if tmp_xyz is None:
            grass.fatal("Unable to create temporary files")
        command_pdal1 = options['pdal_cmd'].split(' ')
        if options['pdal_cmd'] != 'pdal':
            # pdal_cmd is a wrapper (presumably 'docker run ... -v
            # host:container'); translate the pipeline path into the
            # mounted volume — TODO confirm the expected wrapper syntax
            v_index = None
            cmd_entries = options['pdal_cmd'].split(' ')
            for cmd_entry, num in zip(cmd_entries, range(len(cmd_entries))):
                if cmd_entry == '-v':
                    v_index = num
                    break
            mnt_vol = cmd_entries[v_index + 1].split(':')[1]
            tmp_file_json2 = os.path.join(mnt_vol, tmp_file_json)
        else:
            tmp_file_json2 = tmp_file_json
        command_pdal1.extend(['pipeline', '--input', tmp_file_json2])

        command_pdal2 = ['r.in.xyz',
                         'input=' + tmp_xyz, 'output=' + outfile,
                         'skip=1', 'separator=comma', 'method=' + method]

        # forward the optional r.in.xyz parameters only when set
        if zrange:
            command_pdal2.append('zrange=' + zrange)
        if zscale:
            command_pdal2.append('zscale=' + zscale)
        if output_type:
            command_pdal2.append('type=' + output_type)
        if percent:
            command_pdal2.append('percent=' + percent)
        if pth:
            command_pdal2.append('pth=' + pth)
        if trim:
            command_pdal2.append('trim=' + trim)

        fh = open(tmp_xyz, 'wb')
        if grass.call(command_pdal1, stdout=fh) != 0:
            fh.close()
            # check to see if pdal pipeline executed properly
            grass.fatal(_("pdal pipeline is broken..."))
        else:
            fh.close()

        if grass.call(command_pdal2, stdout=outdev) != 0:
            # check to see if r.in.xyz executed properly
            os.remove(tmp_xyz)
            grass.fatal(_("r.in.xyz is broken..."))

        # metadata: load an empty history first, then the actual
        # command line
        empty_history = grass.tempfile()
        if empty_history is None:
            grass.fatal("Unable to create temporary files")
        f = open(empty_history, 'w')
        f.close()
        grass.run_command('r.support', map=outfile,
                          source1=infile,
                          description='generated by r.in.pdal',
                          loadhistory=empty_history)
        grass.run_command('r.support', map=outfile,
                          history=os.environ['CMDLINE'])
        os.remove(empty_history)

        # Cleanup
        grass.message(_("Cleaning up..."))
        grass.run_command('g.remove', flags='f', type='vector',
                          name='tiles', quiet=True)
        if raster_file:
            grass.run_command('g.remove', flags='f', type='raster',
                              pattern=raster_reference[:-1] + '*',
                              quiet=True)
        os.remove(tmp_file_json)
        os.remove(tmp_xyz)
        grass.message(_("Generating output raster map <%s>...") % outfile)
    grass.del_temp_region()
def main():
    """Compute the Topographic Grain of an elevation map via an external
    R script (TG_jozsa.R) and optionally derive a geomorphon map.

    Locates Rscript (Windows registry lookup or PATH), runs the R script
    with the region/neighbourhood settings, reads the estimated search
    distance back from the produced CSV and, with the -g flag, feeds it
    into r.geomorphon.

    FIX: the r.geomorphon branch used the undefined name ``int_res``
    (NameError at runtime); the variable is ``res_int``, as in the
    parallel non-geomorphon branch.
    """
    if platform.system() == 'Windows':
        try:
            import winreg
        except ImportError:
            import _winreg as winreg
        # find the R installation directory in the registry
        # (64-bit view first, then the 32-bit view)
        try:
            try:
                key = winreg.OpenKey(
                    winreg.HKEY_LOCAL_MACHINE,
                    'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall',
                    0, winreg.KEY_READ | winreg.KEY_WOW64_64KEY)
                count = (winreg.QueryInfoKey(key)[0]) - 1
                # scan the uninstall entries backwards for one named 'R for...'
                while (count >= 0):
                    subkeyR = winreg.EnumKey(key, count)
                    if subkeyR.startswith('R for'):
                        count = -1
                    else:
                        count = count - 1
                winreg.CloseKey(key)
                key = winreg.OpenKey(
                    winreg.HKEY_LOCAL_MACHINE,
                    str('SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\'
                        + subkeyR),
                    0, winreg.KEY_READ | winreg.KEY_WOW64_64KEY)
                value = winreg.QueryValueEx(key, 'InstallLocation')[0]
                winreg.CloseKey(key)
            except:
                key = winreg.OpenKey(
                    winreg.HKEY_LOCAL_MACHINE,
                    'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall',
                    0, winreg.KEY_READ | winreg.KEY_WOW64_32KEY)
                count = (winreg.QueryInfoKey(key)[0]) - 1
                while (count >= 0):
                    subkeyR = winreg.EnumKey(key, count)
                    if subkeyR.startswith('R for'):
                        count = -1
                    else:
                        count = count - 1
                winreg.CloseKey(key)
                key = winreg.OpenKey(
                    winreg.HKEY_LOCAL_MACHINE,
                    str('SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\'
                        + subkeyR),
                    0, winreg.KEY_READ | winreg.KEY_WOW64_64KEY)
                value = winreg.QueryValueEx(key, 'InstallLocation')[0]
                winreg.CloseKey(key)
            grass.message(_("R is installed!"))
            pathtor = os.path.join(value, 'bin\\Rscript')
        except:
            grass.fatal("Please install R!")
    else:
        # NOTE(review): subprocess.call does not raise on a nonzero exit,
        # so this only catches a missing 'which' binary — confirm intent
        try:
            subprocess.call(['which', 'R'])
            grass.message(_("R is installed!"))
            pathtor = 'Rscript'
        except:
            grass.fatal("Please install R!")

    # validate the input elevation raster
    elevation = str(options['elevation']).split('@')[0]
    incheck = grass.find_file(name=elevation, element='cell')
    if not incheck['file']:
        grass.fatal("Raster map <%s> not found" % elevation)

    grass.use_temp_region()
    grass.run_command('g.region', rast=elevation)

    # optionally coarsen the working resolution via r.resample
    user_res = int(options['user_res'])
    if user_res == 0:
        gregion = grass.region()
        res_int = int(round(gregion['nsres']))
        grass.message(_("Resolution is kept at: %i m" % res_int))
    else:
        res_int = user_res
        grass.run_command('g.region', res=res_int)
        coarse_elevation = elevation + '%' + str(user_res)
        grass.run_command('r.resample', input=elevation,
                          output=coarse_elevation)
        elevation = coarse_elevation
        grass.message(_("Resolution changed to: %s m" % user_res))

    minneighb = int(options['minneighb'])
    maxneighb = int(options['maxneighb'])
    outpdf = str(options['profile'])
    if outpdf.split('.')[-1] != 'pdf':
        grass.fatal("File type for output TG calculation profile is not pdf")
    outcsv = str(options['table'])
    if outcsv.split('.')[-1] != 'csv':
        grass.fatal("File type for output TG calculation table is not csv")
    tgmaps = flags['c']
    grassversion = grass.version()
    grassversion = grassversion.version[:1]

    # positional arguments handed over to the R script
    TGargs = [elevation, str(res_int), str(minneighb), str(maxneighb),
              outpdf, outcsv, str(int(tgmaps)), str(grassversion)]
    pyscfold = os.path.dirname(os.path.realpath(__file__))
    pathtosc = os.path.join(pyscfold, 'TG_jozsa.R')
    myRSCRIPT = [pathtor, pathtosc] + TGargs
    if not os.path.isfile(pathtosc):
        grass.fatal("Put TG calculation R script to GRASS scripts folder...")
    if tgmaps:
        grass.message(
            _("Will create map of cell-based TG value, relative relief..."))
    grass.message(
        _("Starting R to calculate Topographic Grain... this may take some time..."))

    # run R silently; on failure surface its error log
    devnull = open(os.devnull, 'w')
    error = subprocess.call(myRSCRIPT, stdout=devnull, stderr=devnull)
    if error > 0:
        grass.message(_("R error log below..."))
        errorlog = os.path.join(os.path.dirname(outpdf), 'errorlog.Rout')
        Rerror = open(errorlog, 'r')
        grass.message(_(Rerror.read()))
        Rerror.close()
        grass.fatal("TG calculation failed...")
    else:
        grass.message(
            _("R process finished...Continue working in GRASS GIS..."))

    # restore the original elevation map and region
    elevation = str(options['elevation']).split('@')[0]
    grass.run_command('g.region', rast=elevation)

    ## Check if creating geomorphon map flag is activated
    geom = flags['g']
    if not geom:
        grass.message(_("Not creating geomorphometric map..."))
        # the last row of the CSV holds the estimated TG search distance
        with open(outcsv, 'r') as csvfile:
            outcsv = csv.reader(csvfile, delimiter=',')
            for row in outcsv:
                last = row
            searchtg = int(last[1])
        if user_res != 0:
            # rescale the search distance back to the native resolution
            gregion = grass.region()
            res_int = int(round(gregion['nsres']))
            multiply = int(user_res / res_int)
            searchtg = int(searchtg * multiply)
        grass.message(_("Estimated topographic grain value is %i" % searchtg))
    else:
        ## Check if r.geomorphon is installed
        if not grass.find_program('r.geomorphon', '--help'):
            grass.fatal(
                "r.geomorphon is not installed, run separately after installation")
        else:
            ## Input for r.geomorphon
            with open(outcsv, 'r') as csvfile:
                outcsv = csv.reader(csvfile, delimiter=',')
                for row in outcsv:
                    last = row
                searchtg = int(last[1])
            if user_res != 0:
                # FIX: was 'int_res' (undefined) — use res_int
                multiply = int(user_res / res_int)
                searchtg = int(searchtg * multiply)
            skiptg = int(options['skiptg'])
            flattg = float(options['flattg'])
            disttg = int(options['disttg'])
            geom_map = str(options['geom_map'])
            if geom_map[:11] == '<elevation>':
                geom_map = str(elevation + geom_map[11:-4] + str(searchtg))
            ## Print out settings for geomorphon mapping
            grass.message(_("Generating geomorphons map with settings below:"))
            grass.message(_("Elevation map: %s" % elevation))
            grass.message(_("Search distance: %i" % searchtg))
            grass.message(_("Skip radius: %i" % skiptg))
            grass.message(_("Flatness threshold: %.2f" % flattg))
            grass.message(_("Flatness distance: %i" % disttg))
            grass.message(
                _("Output map: %s" % geom_map
                  + " *existing map will be overwritten"))
            # newer r.geomorphon uses 'elevation=', older uses 'dem='
            try:
                grass.run_command('r.geomorphon', elevation=elevation,
                                  search=searchtg, skip=skiptg, flat=flattg,
                                  dist=disttg, forms=geom_map)
            except:
                grass.run_command('r.geomorphon', dem=elevation,
                                  search=searchtg, skip=skiptg, flat=flattg,
                                  dist=disttg, forms=geom_map)
    grass.del_temp_region()
def main():
    """Export raster maps (or an STRDS selection) as reprojected PNGs
    plus a Leaflet-ready JavaScript layer index and a CSV listing.

    FIX: js_data_file was opened and written but never closed (only
    data_file was), so the generated .js could stay unflushed; it is now
    closed alongside data_file.
    """
    options, flags = gs.parser()

    # it does not check if pngs and other files exists,
    # maybe it could check the any/all file(s) dir
    if options['raster'] and options['strds']:
        gs.fatal(_("Options raster and strds cannot be specified together."
                   " Please decide for one of them."))
    if options['raster'] and options['where']:
        gs.fatal(_("Option where cannot be combined with the option raster."
                   " Please don't set where option or use strds option"
                   " instead of raster option."))

    # build the list of maps to export
    if options['raster']:
        if ',' in options['raster']:
            maps = options['raster'].split(',')  # TODO: skip empty parts
        else:
            maps = [options['raster']]
    elif options['strds']:
        # import and init only when needed
        # init is called anyway when the generated form is used
        import grass.temporal as tgis

        strds = options['strds']
        where = options['where']

        # make sure the temporal database exists
        tgis.init()

        # create the space time raster object
        ds = tgis.open_old_space_time_dataset(strds, 'strds')
        # check if the dataset is in the temporal database
        if not ds.is_in_db():
            gs.fatal(_("Space time dataset <%s> not found") % strds)

        # we need a database interface
        dbiface = tgis.SQLDatabaseInterfaceConnection()
        dbiface.connect()

        # the query
        rows = ds.get_registered_maps(columns='id', where=where,
                                      order='start_time')
        if not rows:
            gs.fatal(_("Cannot get any maps for spatio-temporal raster"
                       " dataset <%s>."
                       " Dataset is empty or you temporal WHERE"
                       " condition filtered all maps out."
                       " Please, specify another dataset,"
                       " put maps into this dataset"
                       " or correct your WHERE condition.") % strds)
        maps = [row['id'] for row in rows]
    else:
        gs.fatal(_("Either raster or strds option must be specified."
                   " Please specify one of them."))

    # get the number of maps for later use
    num_maps = len(maps)

    out_dir = options['output']
    if not os.path.exists(out_dir):
        # TODO: maybe we could create the last dir on specified path?
        gs.fatal(_("Output path <%s> does not exists."
                   " You need to create the (empty) output directory"
                   " yourself before running this module.") % out_dir)
    epsg = int(options['epsg'])

    # one opacity per map, or a single value replicated
    if ',' in options['opacity']:
        opacities = [float(opacity)
                     for opacity in options['opacity'].split(',')]
        if len(opacities) != num_maps:
            gs.fatal(_("Number of opacities <{no}> does not match number"
                       " of maps <{nm}>.").format(no=len(opacities),
                                                  nm=num_maps))
    else:
        opacities = [float(options['opacity'])] * num_maps

    if ',' in options['info']:
        infos = options['info'].split(',')
    else:
        infos = [options['info']]

    if 'geotiff' in infos and not gs.find_program('r.out.tiff', '--help'):
        gs.fatal(_("Install r.out.tiff add-on module to export GeoTIFF"))

    # r.out.png options
    compression = int(options['compression'])
    # flag w is passed to r.out.png.proj
    # our flag n is inversion of r.out.png.proj's t flag
    # (transparent NULLs are better for overlay)
    # we always need the l flag (ll .wgs84 file)
    routpng_flags = ''
    if not flags['n']:
        routpng_flags += 't'
    if flags['w']:
        routpng_flags += 'w'
    # r.out.png.proj l flag for LL .wgs84 file is now function parameter
    # and is specified bellow

    if flags['m']:
        use_region = False
        # we will use map extent
        gs.use_temp_region()
    else:
        use_region = True

    # hard coded file names
    data_file_name = 'data_file.csv'
    js_data_file_name = 'data_file.js'

    data_file = open(os.path.join(out_dir, data_file_name), 'w')
    js_data_file = open(os.path.join(out_dir, js_data_file_name), 'w')
    js_data_file.write('/* This file was generated by r.out.leaflet GRASS GIS'
                       ' module. */\n\n')
    js_data_file.write('var layerInfos = [\n')

    for i, map_name in enumerate(maps):
        if not use_region:
            gs.run_command('g.region', rast=map_name)
        if '@' in map_name:
            pure_map_name = map_name.split('@')[0]
        else:
            pure_map_name = map_name
        # TODO: mixing current and map's mapset at this point
        if '@' in map_name:
            map_name, src_mapset_name = map_name.split('@')
        else:
            # TODO: maybe mapset is mandatory for those out of current mapset?
            src_mapset_name = gs.gisenv()['MAPSET']
        image_file_name = pure_map_name + '.png'
        image_file_path = os.path.join(out_dir, image_file_name)
        # TODO: skip writing to file and extract the information from
        # function, or use object if function is so large
        wgs84_file = image_file_path + '.wgs84'
        export_png_in_projection(map_name=map_name,
                                 src_mapset_name=src_mapset_name,
                                 output_file=image_file_path,
                                 epsg_code=epsg,
                                 compression=compression,
                                 routpng_flags=routpng_flags,
                                 wgs84_file=wgs84_file,
                                 use_region=True)

        data_file.write(pure_map_name + ',' + image_file_name + '\n')

        # it doesn't matter in which location we are, it just uses the current
        # location, not tested for LL loc, assuming that to be nop.
        map_extent = get_map_extent_for_file(wgs84_file)
        bounds = map_extent_to_js_leaflet_list(map_extent)

        extra_attributes = []
        generate_infos(map_name=map_name,
                       projected_png_file=image_file_path,
                       required_infos=infos,
                       output_directory=out_dir,
                       attributes=extra_attributes)
        # http://www.w3schools.com/js/js_objects.asp
        js_data_file.write(""" {{title: "{title}", file: "{file_}","""
                           """ bounds: {bounds}, opacity: {opacity}"""
                           .format(title=pure_map_name,
                                   file_=image_file_name,
                                   bounds=bounds,
                                   opacity=opacities[i]))
        if extra_attributes:
            extra_js_attributes = [pair[0] + ': "' +
                                   escape_quotes(
                                       escape_endlines(
                                           escape_backslashes(
                                               pair[1]))) + '"'
                                   for pair in extra_attributes]
            js_data_file.write(', ' + ', '.join(extra_js_attributes))
        js_data_file.write("""}\n""")
        # do not write after the last item
        if i < num_maps - 1:
            js_data_file.write(',')
    js_data_file.write('];\n')
    data_file.close()
    # FIX: previously only data_file was closed
    js_data_file.close()
def main():
    """Export a GRASS vector map to a GPS format (v.out.gps).

    Reads module options/flags (set by the GRASS parser), optionally
    extracts a subset via SQL, reprojects the features to lat/lon WGS84
    with m.proj, exports them as GPX via v.out.ogr and, unless GPX itself
    was requested, converts the result with gpsbabel.

    Fixes relative to the previous revision:
    - v.extract was called with the literal string "$GIS_OPT_INPUT"
      (a leftover from the shell-script version) instead of the input map.
    - the route branch set FORCE_GPX_TRACK=YES (same as tracks); the GDAL
      GPX driver's layer creation option for routes is FORCE_GPX_ROUTE=YES.
    - the line-munging regexes were literal sed/BRE translations whose
      escaped parentheses matched literal '(' / ')' and defined no group,
      so the \\1 backreferences in the substitutions could never work.
    """
    global tmp, tmp_proj, tmp_gpx, tmp_extr, tmp_vogb

    format = options['format']
    input = options['input']
    layer = options['layer']
    output = options['output']
    type = options['type']
    where = options['where']
    wpt = flags['w']
    rte = flags['r']
    trk = flags['t']

    # exactly one of -w/-r/-t must be given
    # (sum() works on both Python 2 and 3; len(filter(...)) does not on 3)
    nflags = sum(1 for f in (wpt, rte, trk) if f)
    if nflags > 1:
        grass.fatal(_("One feature at a time please."))
    if nflags < 1:
        grass.fatal(_("No features requested for export."))

    # set some reasonable defaults
    if not type:
        type = 'point' if wpt else 'line'

    #### check for gpsbabel
    ### FIXME: may need --help or similar?
    if not grass.find_program("gpsbabel"):
        grass.fatal(_("The gpsbabel program was not found, please install it first.\n") +
                    "http://gpsbabel.sourceforge.net")

    #### check for cs2cs
    if not grass.find_program("cs2cs"):
        grass.fatal(_("The cs2cs program was not found, please install it first.\n") +
                    "http://proj.osgeo.org")

    # check if we will overwrite data
    if os.path.exists(output) and not grass.overwrite():
        grass.fatal(_("Output file already exists."))

    #### set temporary files
    tmp = grass.tempfile()

    # SQL extract if needed
    if where:
        grass.verbose("Extracting data ...")
        tmp_extr = "tmp_vogb_extr_%d" % os.getpid()
        # was: input = "$GIS_OPT_INPUT" (shell leftover) — use the real map name
        ret = grass.run_command('v.extract', input=input,
                                output=tmp_extr, type=type, layer=layer,
                                where=where, quiet=True)
        if ret != 0:
            grass.fatal(_("Error executing SQL query"))

        kv = grass.vector_info_topo(tmp_extr)
        if kv['primitives'] == 0:
            grass.fatal(_("SQL query returned an empty map (no %s features?)") % type)

        inmap = tmp_extr
    else:
        # g.copy "$GIS_OPT_INPUT,tmp_vogb_extr_$$"
        # to get a copy of DB into local mapset
        # INMAP="tmp_vogb_extr_$$"
        inmap = input

    #### set up projection info
    # TODO: check if we are already in ll/WGS84. If so skip m.proj step.
    # TODO: multi layer will probably fail badly due to sed 's/^ 1 /'
    #   output as old GRASS 4 vector ascii and fight with dig_ascii/?
    #   Change to s/^ \([0-9] .*\) /# \1/' ??? mmph.

    # reproject to lat/lon WGS84
    grass.verbose("Reprojecting data ...")

    # Python translations of the original sed expressions; groups use
    # plain parentheses (BRE \( \) would match literal parens in Python).
    re1 = re.compile(r'^([PLBCFKA])')       # comment out geometry type markers
    re2 = re.compile(r'^ 1 ')
    re3 = re.compile(r'\t([-\.0-9]*) .*')   # keep first coordinate field
    re4 = re.compile(r'^([-\.0-9])')
    re5 = re.compile(r'^#')

    tmp_proj = tmp + ".proj"
    tf = open(tmp_proj, 'w')
    p1 = grass.pipe_command('v.out.ascii', input=inmap, format='standard')
    p2 = grass.feed_command('m.proj', input='-', flags='od', quiet=True,
                            stdout=tf)
    tf.close()

    # skip the 10-line v.out.ascii header, comment out markers m.proj
    # must not interpret as coordinates
    lineno = 0
    for line in p1.stdout:
        lineno += 1
        if lineno < 11:
            continue
        line = re1.sub(r'#\1', line)
        line = re2.sub(r'# 1 ', line)
        p2.stdin.write(line)

    p2.stdin.close()
    p1.wait()
    p2.wait()
    if p1.returncode != 0 or p2.returncode != 0:
        grass.fatal(_("Error reprojecting data"))

    tmp_vogb = "tmp_vogb_epsg4326_%d" % os.getpid()
    p3 = grass.feed_command('v.in.ascii', out=tmp_vogb, format='standard',
                            flags='n', quiet=True)
    tf = open(tmp_proj, 'r')
    # undo the marker comments so v.in.ascii gets valid standard format
    for line in tf:
        line = re3.sub(r' \1', line)
        line = re4.sub(r' \1', line)
        line = re5.sub('', line)
        p3.stdin.write(line)
    p3.stdin.close()
    tf.close()
    p3.wait()
    if p3.returncode != 0:
        grass.fatal(_("Error reprojecting data"))

    # don't v.db.connect directly as source table will be removed with
    # temporary map in that case. So we make a temp copy of it to work with.
    kv = vector_db(inmap)
    if layer in kv:
        db_params = kv[layer]
        db_table = db_params['table']
        db_database = db_params['database']
        db_driver = db_params['driver']
        ret = grass.run_command('db.copy',
                                from_driver=db_driver,
                                from_database=db_database,
                                from_table=db_table,
                                to_table=tmp_vogb)
        if ret != 0:
            grass.fatal(_("Error copying temporary DB"))

        ret = grass.run_command('v.db.connect', map=tmp_vogb,
                                table=tmp_vogb, quiet=True)
        if ret != 0:
            grass.fatal(_("Error reconnecting temporary DB"))

    # export as GPX using v.out.ogr
    if trk:
        linetype = "FORCE_GPX_TRACK=YES"
    elif rte:
        # was FORCE_GPX_TRACK=YES (copy/paste); routes need FORCE_GPX_ROUTE
        linetype = "FORCE_GPX_ROUTE=YES"
    else:
        linetype = None

    # BUG: cat is being reported as evelation and attribute output is skipped.
    # (v.out.ogr DB reading or ->OGR GPX driver bug<-
    #  resolved: see new Create opts at http://www.gdal.org/ogr/drv_gpx.html)
    # v.out.ogr -> shapefile -> GPX works, but we try to avoid that as it's
    # lossy. Also that would allow ogr2ogr -a_srs $IN_PROJ -t_srs EPSG:4326
    # so skip m.proj pains.. if that is done ogr2ogr -s_srs MUST HAVE +wktext
    # with PROJ.4 terms or else the +nadgrids will be ignored! best to feed
    # it IN_PROJ="`g.proj -jf` +wktext" in that case.

    grass.verbose("Exporting data ...")
    tmp_gpx = tmp + ".gpx"
    ret = grass.run_command('v.out.ogr', input=tmp_vogb, dsn=tmp_gpx,
                            type=type, format='GPX', lco=linetype,
                            dsco="GPX_USE_EXTENSIONS=YES", quiet=True)
    if ret != 0:
        grass.fatal(_("Error exporting data"))

    if format == 'gpx':
        # short circuit, we have what we came for.
        grass.try_remove(output)
        os.rename(tmp_gpx, output)
        grass.verbose("Fast exit.")
        sys.exit()

    # run gpsbabel
    if wpt:
        gtype = '-w'
    elif trk:
        gtype = '-t'
    elif rte:
        gtype = '-r'
    else:
        gtype = ''

    grass.verbose("Running GPSBabel ...")
    ret = grass.call(['gpsbabel',
                      gtype,
                      '-i', 'gpx',
                      '-f', tmp + '.gpx',
                      '-o', format,
                      '-F', output])
    if ret != 0:
        grass.fatal(_("Error running GPSBabel"))

    grass.verbose("Done.")
def main():
    """Create tiling cutlines for a raster (i.cutlines).

    Detects edges in the input raster (i.zc or the i.edge addon, optionally
    tiled via GridModule), builds friction/cost surfaces, traces
    least-cost horizontal and vertical cutlines with r.cost + r.drain,
    and converts the combined line network into vector tile polygons.

    Reads all inputs from the module-level ``options``/``flags`` dicts
    (set by the GRASS parser). Registers every temporary map in the
    global ``temp_maps`` list — presumably removed by a cleanup routine
    elsewhere in the file (not visible here).
    """
    # --- read and convert options ---
    inputraster = options['input']
    number_lines = int(options['number_lines'])
    edge_detection_algorithm = options['edge_detection']
    no_edge_friction = int(options['no_edge_friction'])
    lane_border_multiplier = int(options['lane_border_multiplier'])
    min_tile_size = None
    if options['min_tile_size']:
        min_tile_size = float(options['min_tile_size'])
    existing_cutlines = None
    if options['existing_cutlines']:
        existing_cutlines = options['existing_cutlines'].split(',')
    tiles = options['output']
    memory = int(options['memory'])
    tiled = False

    if options['tile_width']:
        tiled = True
        gscript.message(_("Using tiles processing for edge detection"))
        width = int(options['tile_width'])
        height = int(options['tile_height'])
        overlap = int(options['overlap'])

    processes = int(options['processes'])

    global temp_maps
    temp_maps = []
    # element-type tags used when registering temporary maps for cleanup
    r = 'raster'
    v = 'vector'

    # --- rasterize existing cutlines so they can lower the cost surface ---
    if existing_cutlines:
        existingcutlinesmap = 'temp_icutlines_existingcutlinesmap_%i' % os.getpid(
        )
        if len(existing_cutlines) > 1:
            gscript.run_command('v.patch',
                                input_=existing_cutlines,
                                output=existingcutlinesmap,
                                quiet=True,
                                overwrite=True)
            existing_cutlines = existingcutlinesmap

        gscript.run_command('v.to.rast',
                            input_=existing_cutlines,
                            output=existingcutlinesmap,
                            use='val',
                            type_='line,boundary',
                            overwrite=True,
                            quiet=True)
        temp_maps.append([existingcutlinesmap, r])

    # --- edge detection (zero-crossing or Canny addon) ---
    temp_edge_map = "temp_icutlines_edgemap_%d" % os.getpid()
    temp_maps.append([temp_edge_map, r])
    gscript.message(
        _("Creating edge map using <%s> edgedetection algorithm") %
        edge_detection_algorithm)
    if edge_detection_algorithm == 'zc':
        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'width_': int(options['zc_width']),
            'threshold': float(options['zc_threshold']),
            'quiet': True
        }
        if tiled:
            grd = GridModule('i.zc',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.zc', **kwargs)
    elif edge_detection_algorithm == 'canny':
        if not gscript.find_program('i.edge', '--help'):
            message = _("You need to install the addon i.edge to use ")
            message += _("the Canny edge detector.\n")
            message += _(
                " You can install the addon with 'g.extension i.edge'")
            gscript.fatal(message)

        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'low_threshold': float(options['canny_low_threshold']),
            'high_threshold': float(options['canny_high_threshold']),
            'sigma': float(options['canny_sigma']),
            'quiet': True
        }
        if tiled:
            grd = GridModule('i.edge',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             flags='n',
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.edge', flags='n', **kwargs)
    else:
        gscript.fatal(
            "Only zero-crossing and Canny available as edge detection algorithms."
        )

    region = gscript.region()
    gscript.message(_("Finding cutlines in both directions"))

    # split number_lines between the two directions proportionally to the
    # region's aspect ratio (at least one line in the shorter direction)
    nsrange = float(region.n - region.s - region.nsres)
    ewrange = float(region.e - region.w - region.ewres)
    if nsrange > ewrange:
        hnumber_lines = number_lines
        vnumber_lines = max(int(number_lines * (ewrange / nsrange)), 1)
    else:
        vnumber_lines = number_lines
        hnumber_lines = max(int(number_lines * (nsrange / ewrange)), 1)

    # Create the lines in horizonal direction
    # start/stop points are pulled 0.2 cells inside the region so they
    # fall on valid raster cells
    nsstep = float(region.n - region.s - region.nsres) / hnumber_lines
    hpointsy = [((region.n - i * nsstep) - region.nsres / 2.0)
                for i in range(0, hnumber_lines + 1)]
    hlanepointsy = [y - nsstep / 2.0 for y in hpointsy]
    hstartpoints = listzip([region.w + 0.2 * region.ewres] * len(hpointsy),
                           hpointsy)
    hstoppoints = listzip([region.e - 0.2 * region.ewres] * len(hpointsy),
                          hpointsy)
    hlanestartpoints = listzip([region.w + 0.2 * region.ewres] *
                               len(hlanepointsy), hlanepointsy)
    hlanestoppoints = listzip([region.e - 0.2 * region.ewres] *
                              len(hlanepointsy), hlanepointsy)

    hlanemap = 'temp_icutlines_hlanemap_%i' % os.getpid()
    temp_maps.append([hlanemap, v])
    temp_maps.append([hlanemap, r])

    # silence pygrass vector writing; restored via del below
    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(hlanemap)
    new.open('w')
    for line in listzip(hlanestartpoints, hlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']

    gscript.run_command('v.to.rast',
                        input_=hlanemap,
                        output=hlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)

    hbasemap = 'temp_icutlines_hbasemap_%i' % os.getpid()
    temp_maps.append([hbasemap, r])

    # Building the cost maps using the following logic
    # - Any pixel not on an edge, nor on an existing cutline gets a
    # no_edge_friction cost, or no_edge_friction_cost x 10 if there are
    # existing cutlines
    # - Any pixel on an edge gets a cost of 1 if there are no existing cutlines,
    # and a cost of no_edge_friction if there are
    # - A lane line gets a very high cost (lane_border_multiplier x cost of no
    # edge pixel - the latter depending on the existence of cutlines).
    mapcalc_expression = "%s = " % hbasemap
    mapcalc_expression += "if(isnull(%s), " % hlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (existingcutlinesmap,
                                                           no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    hcumcost = 'temp_icutlines_hcumcost_%i' % os.getpid()
    temp_maps.append([hcumcost, r])
    hdir = 'temp_icutlines_hdir_%i' % os.getpid()
    temp_maps.append([hdir, r])

    # Create the lines in vertical direction
    ewstep = float(region.e - region.w - region.ewres) / vnumber_lines
    vpointsx = [((region.e - i * ewstep) - region.ewres / 2.0)
                for i in range(0, vnumber_lines + 1)]
    vlanepointsx = [x + ewstep / 2.0 for x in vpointsx]
    vstartpoints = listzip(vpointsx,
                           [region.n - 0.2 * region.nsres] * len(vpointsx))
    vstoppoints = listzip(vpointsx,
                          [region.s + 0.2 * region.nsres] * len(vpointsx))
    vlanestartpoints = listzip(vlanepointsx, [region.n - 0.2 * region.nsres] *
                               len(vlanepointsx))
    vlanestoppoints = listzip(vlanepointsx, [region.s + 0.2 * region.nsres] *
                              len(vlanepointsx))

    vlanemap = 'temp_icutlines_vlanemap_%i' % os.getpid()
    temp_maps.append([vlanemap, v])
    temp_maps.append([vlanemap, r])

    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(vlanemap)
    new.open('w')
    for line in listzip(vlanestartpoints, vlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']

    gscript.run_command('v.to.rast',
                        input_=vlanemap,
                        output=vlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)

    vbasemap = 'temp_icutlines_vbasemap_%i' % os.getpid()
    temp_maps.append([vbasemap, r])
    # same cost logic as for the horizontal base map above
    mapcalc_expression = "%s = " % vbasemap
    mapcalc_expression += "if(isnull(%s), " % vlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (existingcutlinesmap,
                                                           no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    vcumcost = 'temp_icutlines_vcumcost_%i' % os.getpid()
    temp_maps.append([vcumcost, r])
    vdir = 'temp_icutlines_vdir_%i' % os.getpid()
    temp_maps.append([vdir, r])

    # run the two r.cost passes in parallel when allowed, splitting memory
    if processes > 1:
        pmemory = memory / 2.0
        rcv = gscript.start_command('r.cost',
                                    input_=vbasemap,
                                    startcoordinates=vstartpoints,
                                    stopcoordinates=vstoppoints,
                                    output=vcumcost,
                                    outdir=vdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)
        rch = gscript.start_command('r.cost',
                                    input_=hbasemap,
                                    startcoordinates=hstartpoints,
                                    stopcoordinates=hstoppoints,
                                    output=hcumcost,
                                    outdir=hdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)
        rcv.wait()
        rch.wait()
    else:
        gscript.run_command('r.cost',
                            input_=vbasemap,
                            startcoordinates=vstartpoints,
                            stopcoordinates=vstoppoints,
                            output=vcumcost,
                            outdir=vdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)
        gscript.run_command('r.cost',
                            input_=hbasemap,
                            startcoordinates=hstartpoints,
                            stopcoordinates=hstoppoints,
                            output=hcumcost,
                            outdir=hdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)

    hlines = 'temp_icutlines_hlines_%i' % os.getpid()
    temp_maps.append([hlines, r])
    vlines = 'temp_icutlines_vlines_%i' % os.getpid()
    temp_maps.append([vlines, r])

    # trace least-cost paths back from the stop points
    if processes > 1:
        rdh = gscript.start_command('r.drain',
                                    input_=hcumcost,
                                    direction=hdir,
                                    startcoordinates=hstoppoints,
                                    output=hlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)
        rdv = gscript.start_command('r.drain',
                                    input_=vcumcost,
                                    direction=vdir,
                                    startcoordinates=vstoppoints,
                                    output=vlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)
        rdh.wait()
        rdv.wait()
    else:
        gscript.run_command('r.drain',
                            input_=hcumcost,
                            direction=hdir,
                            startcoordinates=hstoppoints,
                            output=hlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)
        gscript.run_command('r.drain',
                            input_=vcumcost,
                            direction=vdir,
                            startcoordinates=vstoppoints,
                            output=vlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)

    # Combine horizonal and vertical lines
    temp_raster_tile_borders = 'temp_icutlines_raster_tile_borders_%i' % os.getpid(
    )
    temp_maps.append([temp_raster_tile_borders, r])
    gscript.run_command('r.patch',
                        input_=[hlines, vlines],
                        output=temp_raster_tile_borders,
                        quiet=True,
                        overwrite=True)

    gscript.message(_("Creating vector polygons"))

    # Create vector polygons

    # First we need to shrink the region a bit to make sure that all vector
    # points / lines fall within the raster
    gscript.use_temp_region()
    gscript.run_command('g.region',
                        s=region.s + region.nsres,
                        e=region.e - region.ewres,
                        quiet=True)

    region_map = 'temp_icutlines_region_map_%i' % os.getpid()
    temp_maps.append([region_map, v])
    temp_maps.append([region_map, r])
    gscript.run_command('v.in.region',
                        output=region_map,
                        type_='line',
                        quiet=True,
                        overwrite=True)

    gscript.del_temp_region()

    gscript.run_command('v.to.rast',
                        input_=region_map,
                        output=region_map,
                        use='val',
                        type_='line',
                        quiet=True,
                        overwrite=True)

    temp_raster_polygons = 'temp_icutlines_raster_polygons_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons, r])
    gscript.run_command('r.patch',
                        input_=[temp_raster_tile_borders, region_map],
                        output=temp_raster_polygons,
                        quiet=True,
                        overwrite=True)

    temp_raster_polygons_thin = 'temp_icutlines_raster_polygons_thin_%i' % os.getpid(
    )
    temp_maps.append([temp_raster_polygons_thin, r])
    gscript.run_command('r.thin',
                        input_=temp_raster_polygons,
                        output=temp_raster_polygons_thin,
                        quiet=True,
                        overwrite=True)

    # Create a series of temporary map names as we have to go
    # through several steps until we reach the final map.
    temp_vector_polygons1 = 'temp_icutlines_vector_polygons1_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons1, v])
    temp_vector_polygons2 = 'temp_icutlines_vector_polygons2_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons2, v])
    temp_vector_polygons3 = 'temp_icutlines_vector_polygons3_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons3, v])
    temp_vector_polygons4 = 'temp_icutlines_vector_polygons4_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons4, v])

    gscript.run_command('r.to.vect',
                        input_=temp_raster_polygons_thin,
                        output=temp_vector_polygons1,
                        type_='line',
                        flags='t',
                        quiet=True,
                        overwrite=True)

    # Erase all category values from the lines
    gscript.run_command('v.category',
                        input_=temp_vector_polygons1,
                        op='del',
                        cat='-1',
                        output=temp_vector_polygons2,
                        quiet=True,
                        overwrite=True)

    # Transform lines to boundaries
    gscript.run_command('v.type',
                        input_=temp_vector_polygons2,
                        from_type='line',
                        to_type='boundary',
                        output=temp_vector_polygons3,
                        quiet=True,
                        overwrite=True)

    # Add centroids
    gscript.run_command('v.centroids',
                        input_=temp_vector_polygons3,
                        output=temp_vector_polygons4,
                        quiet=True,
                        overwrite=True)

    # If a threshold is given erase polygons that are too small
    if min_tile_size:
        gscript.run_command('v.clean',
                            input_=temp_vector_polygons4,
                            tool=['rmdangle', 'rmarea'],
                            threshold=[-1, min_tile_size],
                            output=tiles,
                            quiet=True,
                            overwrite=True)
    else:
        gscript.run_command('g.copy',
                            vect=[temp_vector_polygons4, tiles],
                            quiet=True,
                            overwrite=True)

    gscript.vector_history(tiles)
def __init__(self, parent, giface, settings, scaniface):
    """Build the color-classification interaction panel.

    :param parent: parent wx window
    :param giface: GRASS GUI interface object
    :param settings: shared settings dict; a 'color' sub-dict with
        'active'/'name'/'training' keys is created here if missing
    :param scaniface: scanning interface object used by the handlers
        (OnCalibration / OnAnalysis, defined elsewhere in this class)
    """
    wx.Panel.__init__(self, parent)
    # fixed names of the intermediate/output GRASS maps used by this panel
    self.group = None
    self.segment = 'segment'
    self.segment_clump = 'segment_clump'
    self.signature = 'signature'
    self.classification = 'classification'
    self.filtered_classification = 'fclassification'
    self.reject = 'reject'
    self.output = 'objects'
    # True when the i.superpixels.slic addon is installed
    self.hasSuperpixels = gscript.find_program('i.superpixels.slic', '--help')
    self.env = None
    self.giface = giface
    self.parent = parent
    self.settings = settings
    self.scaniface = scaniface
    # emitted whenever a widget value changes (see OnChange elsewhere)
    self.settingsChanged = Signal('ColorInteractionPanel.settingsChanged')
    # initialize default settings on first use
    if 'color' not in self.settings:
        self.settings['color'] = {}
        self.settings['color']['active'] = False
        self.settings['color']['name'] = ''
        self.settings['color']['training'] = ''
    # widgets collected here are presumably shown/hidden together
    # by _enable() — confirm against the rest of the class
    self.hide = []
    self.ifColor = wx.CheckBox(self, label=_("Save color rasters:"))
    self.ifColor.SetValue(self.settings['color']['active'])
    self.ifColor.Bind(wx.EVT_CHECKBOX, self.OnChange)
    self.exportColor = Select(self, size=(-1, -1), type='raster')
    self.exportColor.SetValue(self.settings['color']['name'])
    self.exportColor.Bind(wx.EVT_TEXT, self.OnChange)
    self.hide.append(self.exportColor)
    if self.settings['color']['name']:
        self.group = self.settings['color']['name']
    self.trainingAreas = Select(self, size=(-1, -1), type='raster')
    self.trainingAreas.SetValue(self.settings['color']['training'])
    self.trainingAreas.Bind(wx.EVT_TEXT, self.OnChange)
    labelTraining = wx.StaticText(self, label=_("Training areas:"))
    self.hide.append(self.trainingAreas)
    self.hide.append(labelTraining)
    calibrateBtn = wx.Button(self, label=_("Calibrate"))
    calibrateBtn.Bind(wx.EVT_BUTTON, self.OnCalibration)
    self.hide.append(calibrateBtn)
    analyzeBtn = wx.Button(self, label=_("Scan and process"))
    analyzeBtn.Bind(wx.EVT_BUTTON, self.OnAnalysis)
    self.hide.append(analyzeBtn)
    # --- layout: three horizontal rows inside a vertical sizer ---
    self.mainSizer = wx.BoxSizer(wx.VERTICAL)
    sizer = wx.BoxSizer(wx.HORIZONTAL)
    sizer.Add(self.ifColor, flag=wx.ALIGN_CENTER_VERTICAL, border=5)
    sizer.Add(self.exportColor, proportion=1, flag=wx.ALIGN_CENTER_VERTICAL,
              border=5)
    self.mainSizer.Add(sizer, flag=wx.EXPAND | wx.ALL, border=5)
    sizer = wx.BoxSizer(wx.HORIZONTAL)
    sizer.Add(labelTraining, flag=wx.ALIGN_CENTER_VERTICAL, border=5)
    sizer.Add(self.trainingAreas, proportion=1,
              flag=wx.ALIGN_CENTER_VERTICAL, border=5)
    sizer.Add(calibrateBtn, flag=wx.ALIGN_CENTER_VERTICAL, border=5)
    self.mainSizer.Add(sizer, flag=wx.EXPAND | wx.ALL, border=5)
    sizer = wx.BoxSizer(wx.HORIZONTAL)
    sizer.AddStretchSpacer()
    sizer.Add(analyzeBtn, flag=wx.ALIGN_CENTER_VERTICAL, border=5)
    self.mainSizer.Add(sizer, flag=wx.EXPAND | wx.ALL, border=5)
    self.SetSizer(self.mainSizer)
    self.mainSizer.Fit(self)
    self._enable()
def main():
    """Worker entry point: import Sentinel-2 data into a fresh mapset.

    Creates a new mapset (via a private GISRC copy so parallel jobs don't
    clash), runs the i.sentinel.import addon there, and optionally
    resamples all bands to 10m resolution. Renamed originals and
    coarse-resolution bands are queued in the global ``rm_rasters`` list
    — presumably removed by a cleanup handler elsewhere in the file.
    """
    global rm_rasters
    memory = int(options['memory'])
    input = options['input']
    new_mapset = options['mapsetid']
    pattern = options['pattern']
    # collect pass-through flags for i.sentinel.import
    flag = ''
    if flags['r']:
        flag += 'r'
    if flags['c']:
        flag += 'c'
    if flags['j']:
        flag += 'j'

    # check if we have the i.sentinel.import addon
    if not grass.find_program('i.sentinel.import', '--help'):
        grass.fatal(
            _("The 'i.sentinel.import' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")

    # set some common environmental variables, like:
    os.environ.update(
        dict(GRASS_COMPRESS_NULLS='1',
             GRASS_COMPRESSOR='LZ4',
             GRASS_MESSAGE_FORMAT='plain'))

    # actual mapset, location, ...
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    old_mapset = env['MAPSET']

    grass.message("New mapset: <%s>" % new_mapset)
    # remove any leftover mapset directory from a previous (failed) run
    grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))

    # create a private GISRC file for each job
    gisrc = os.environ['GISRC']
    newgisrc = "%s_%s" % (gisrc, str(os.getpid()))
    grass.try_remove(newgisrc)
    shutil.copyfile(gisrc, newgisrc)
    os.environ['GISRC'] = newgisrc

    # change mapset
    grass.message("GISRC: <%s>" % os.environ['GISRC'])
    grass.run_command('g.mapset', flags='c', mapset=new_mapset)

    # Test memory settings
    free_ram = freeRAM('MB', 100)
    if free_ram < memory:
        memory = free_ram
        grass.warning("Free RAM only %d MB. <memory> set to it." % (memory))

    # import data
    grass.message(
        _("Importing (and reprojecting as needed) Sentinel-2 data..."))
    kwargs = {
        'input': input,
        'memory': memory,
        'pattern': pattern,
        'flags': flag
    }
    if options['region']:
        # region lives in the previous mapset, hence the explicit @mapset
        grass.run_command('g.region',
                          region=options['region'] + '@' + old_mapset)
        kwargs['extent'] = 'region'
    if options['metadata']:
        kwargs['metadata'] = options['metadata']

    # build a shell command line from kwargs so stderr can be captured
    # (run_command would not expose the overlap warning text)
    kwargsstr = ""
    flagstr = ""
    for key, val in kwargs.items():
        if not key == "flags":
            kwargsstr += (" %s='%s'" % (key, val))
        else:
            flagstr += val
    cmd = grass.Popen("i.sentinel.import --q %s -%s" % (kwargsstr, flagstr),
                      shell=True,
                      stdout=subprocess.PIPE,
                      stderr=subprocess.PIPE)
    resp = cmd.communicate()
    for resp_line in resp:
        if 'Input raster does not overlap current computational region' in resp_line.decode(
                "utf-8"):
            grass.warning(
                _("Input raster <%s> does not overlap current computational region"
                  ) % options['input'])

    # resampling
    if flags['i']:
        grass.message('Resampling bands to 10m')
        raster = list(grass.parse_command('g.list', type='raster').keys())
        if len(raster) < 1:
            grass.fatal('No band found')
        grass.run_command('g.region', raster=raster[0], res=10, flags='pa')

        # get all rasters to be resampled
        raster_resamp_list = list(
            grass.parse_command('g.list', type='raster',
                                pattern='*B*_10m').keys())
        list_20m = list(
            grass.parse_command('g.list', type='raster',
                                pattern='*B*_20m').keys())
        list_60m = list(
            grass.parse_command('g.list', type='raster',
                                pattern='*B*_60m').keys())
        raster_resamp_list.extend(list_20m)
        raster_resamp_list.extend(list_60m)

        # resample
        if len(raster_resamp_list) > 0:
            for raster in raster_resamp_list:
                outname = raster
                # native 10m bands are renamed so the interpolated result
                # can take the original name
                if raster.endswith('10m'):
                    grass.run_command('g.rename',
                                      raster="%s,%sTMP" % (raster, raster))
                    raster = "%sTMP" % (raster)
                if raster.endswith('20m'):
                    outname = outname.replace('20m', '10m')
                elif raster.endswith('60m'):
                    outname = outname.replace('60m', '10m')
                grass.run_command('r.resamp.interp',
                                  input=raster,
                                  output=outname,
                                  method='bilinear',
                                  quiet=True)
                # remove the old bands
                rm_rasters.append(raster)
def main():
    """Export rasters as web-mercator-ready PNG overlays (r.out.leaflet).

    Takes either a list of rasters or an strds (space-time raster
    dataset), exports each map as a projected PNG via
    export_png_in_projection(), and writes two index files into the
    output directory: data_file.csv (map name -> PNG) and data_file.js
    (a JavaScript ``layerInfos`` array with bounds, opacity and any
    extra attributes produced by generate_infos()).

    Fix: ``js_data_file`` was opened but never closed (only
    ``data_file.close()`` was called), so the JS index could be left
    unflushed/truncated on interpreter teardown.
    """
    options, flags = gs.parser()

    # it does not check if pngs and other files exists,
    # maybe it could check the any/all file(s) dir
    if options['raster'] and options['strds']:
        gs.fatal(
            _("Options raster and strds cannot be specified together."
              " Please decide for one of them."))
    if options['raster'] and options['where']:
        gs.fatal(
            _("Option where cannot be combined with the option raster."
              " Please don't set where option or use strds option"
              " instead of raster option."))
    if options['raster']:
        if ',' in options['raster']:
            maps = options['raster'].split(',')  # TODO: skip empty parts
        else:
            maps = [options['raster']]
    elif options['strds']:
        # import and init only when needed
        # init is called anyway when the generated form is used
        import grass.temporal as tgis

        strds = options['strds']
        where = options['where']

        # make sure the temporal database exists
        tgis.init()

        # create the space time raster object
        ds = tgis.open_old_space_time_dataset(strds, 'strds')
        # check if the dataset is in the temporal database
        if not ds.is_in_db():
            gs.fatal(_("Space time dataset <%s> not found") % strds)

        # we need a database interface
        dbiface = tgis.SQLDatabaseInterfaceConnection()
        dbiface.connect()

        # the query
        rows = ds.get_registered_maps(columns='id',
                                      where=where,
                                      order='start_time')
        if not rows:
            gs.fatal(
                _("Cannot get any maps for spatio-temporal raster"
                  " dataset <%s>."
                  " Dataset is empty or you temporal WHERE"
                  " condition filtered all maps out."
                  " Please, specify another dataset,"
                  " put maps into this dataset"
                  " or correct your WHERE condition.") % strds)
        maps = [row['id'] for row in rows]
    else:
        gs.fatal(
            _("Either raster or strds option must be specified."
              " Please specify one of them."))
    # get the number of maps for later use
    num_maps = len(maps)

    out_dir = options['output']
    if not os.path.exists(out_dir):
        # TODO: maybe we could create the last dir on specified path?
        gs.fatal(
            _("Output path <%s> does not exists."
              " You need to create the (empty) output directory"
              " yourself before running this module.") % out_dir)
    epsg = int(options['epsg'])

    # per-map opacities: either one value for all maps or one per map
    if ',' in options['opacity']:
        opacities = [
            float(opacity) for opacity in options['opacity'].split(',')
        ]
        if len(opacities) != num_maps:
            gs.fatal(
                _("Number of opacities <{no}> does not match number"
                  " of maps <{nm}>.").format(no=len(opacities), nm=num_maps))
    else:
        opacities = [float(options['opacity'])] * num_maps

    if ',' in options['info']:
        infos = options['info'].split(',')
    else:
        infos = [options['info']]

    if 'geotiff' in infos and not gs.find_program('r.out.tiff', '--help'):
        gs.fatal(_("Install r.out.tiff add-on module to export GeoTIFF"))

    # r.out.png options
    compression = int(options['compression'])
    # flag w is passed to r.out.png.proj
    # our flag n is inversion of r.out.png.proj's t flag
    # (transparent NULLs are better for overlay)
    # we always need the l flag (ll .wgs84 file)
    routpng_flags = ''
    if not flags['n']:
        routpng_flags += 't'
    if flags['w']:
        routpng_flags += 'w'
    # r.out.png.proj l flag for LL .wgs84 file is now function parameter
    # and is specified bellow

    if flags['m']:
        use_region = False
        # we will use map extent
        gs.use_temp_region()
    else:
        use_region = True

    # hard coded file names
    data_file_name = 'data_file.csv'
    js_data_file_name = 'data_file.js'

    data_file = open(os.path.join(out_dir, data_file_name), 'w')
    js_data_file = open(os.path.join(out_dir, js_data_file_name), 'w')
    js_data_file.write('/* This file was generated by r.out.leaflet GRASS GIS'
                       ' module. */\n\n')
    js_data_file.write('var layerInfos = [\n')

    for i, map_name in enumerate(maps):
        if not use_region:
            gs.run_command('g.region', rast=map_name)
        if '@' in map_name:
            pure_map_name = map_name.split('@')[0]
        else:
            pure_map_name = map_name
        # TODO: mixing current and map's mapset at this point
        if '@' in map_name:
            map_name, src_mapset_name = map_name.split('@')
        else:
            # TODO: maybe mapset is mandatory for those out of current mapset?
            src_mapset_name = gs.gisenv()['MAPSET']
        image_file_name = pure_map_name + '.png'
        image_file_path = os.path.join(out_dir, image_file_name)
        # TODO: skip writing to file and extract the information from
        # function, or use object if function is so large
        wgs84_file = image_file_path + '.wgs84'
        export_png_in_projection(map_name=map_name,
                                 src_mapset_name=src_mapset_name,
                                 output_file=image_file_path,
                                 epsg_code=epsg,
                                 compression=compression,
                                 routpng_flags=routpng_flags,
                                 wgs84_file=wgs84_file,
                                 use_region=True)

        data_file.write(pure_map_name + ',' + image_file_name + '\n')

        # it doesn't matter in which location we are, it just uses the current
        # location, not tested for LL loc, assuming that to be nop.
        map_extent = get_map_extent_for_file(wgs84_file)
        bounds = map_extent_to_js_leaflet_list(map_extent)

        extra_attributes = []
        generate_infos(map_name=map_name,
                       projected_png_file=image_file_path,
                       required_infos=infos,
                       output_directory=out_dir,
                       attributes=extra_attributes)
        # http://www.w3schools.com/js/js_objects.asp
        js_data_file.write(""" {{title: "{title}", file: "{file_}","""
                           """ bounds: {bounds}, opacity: {opacity}""".format(
                               title=pure_map_name,
                               file_=image_file_name,
                               bounds=bounds,
                               opacity=opacities[i]))
        if extra_attributes:
            extra_js_attributes = [
                pair[0] + ': "' +
                escape_quotes(escape_endlines(escape_backslashes(pair[1]))) +
                '"' for pair in extra_attributes
            ]
            js_data_file.write(', ' + ', '.join(extra_js_attributes))
        js_data_file.write("""}\n""")
        # do not write after the last item
        if i < num_maps - 1:
            js_data_file.write(',')
    js_data_file.write('];\n')
    # close both index files (js_data_file was previously leaked)
    js_data_file.close()
    data_file.close()
def main():
    """Import a SPOT VGT NDVI HDF file (i.in.spotvgt).

    Builds a VRT wrapper around the HDF, imports it with r.in.gdal,
    rescales digital numbers to real NDVI values, and — with the -a
    flag — also imports the SM status map and produces a quality-filtered
    NDVI map. Uses module-level ``options``/``flags`` and the globals
    ``vrtfile``/``tmpfile`` (presumably removed by a cleanup routine
    elsewhere in the file).
    """
    global vrtfile, tmpfile

    infile = options['input']
    rast = options['output']
    also = flags['a']

    #### check for gdalinfo (just to check if installation is complete)
    if not grass.find_program('gdalinfo', '--help'):
        grass.fatal(_("'gdalinfo' not found, install GDAL tools first (http://www.gdal.org)"))

    pid = str(os.getpid())
    tmpfile = grass.tempfile()

    ################### let's go

    spotdir = os.path.dirname(infile)
    spotname = grass.basename(infile, 'hdf')

    if rast:
        name = rast
    else:
        name = spotname

    if not grass.overwrite() and grass.find_file(name)['file']:
        grass.fatal(_("<%s> already exists. Aborting.") % name)

    # still a ZIP file? (is this portable?? see the r.in.srtm script for ideas)
    if infile.lower().endswith('.zip'):
        grass.fatal(_("Please extract %s before import.") % infile)

    # best-effort MIME sniff via file(1); silently skipped if unavailable
    try:
        p = grass.Popen(['file', '-ib', infile], stdout = grass.PIPE)
        s = p.communicate()[0]
        if s == "application/x-zip":
            grass.fatal(_("Please extract %s before import.") % infile)
    except:
        pass

    ### create VRT header for NDVI

    projfile = os.path.join(spotdir, "0001_LOG.TXT")
    vrtfile = tmpfile + '.vrt'

    # first process the NDVI:
    grass.try_remove(vrtfile)
    create_VRT_file(projfile, vrtfile, infile)

    ## let's import the NDVI map...
    grass.message(_("Importing SPOT VGT NDVI map..."))
    try:
        grass.run_command('r.in.gdal', input=vrtfile, output=name)
    except CalledModuleError:
        grass.fatal(_("An error occurred. Stop."))

    grass.message(_("Imported SPOT VEGETATION NDVI map <%s>.") % name)

    #################
    ## http://www.vgt.vito.be/faq/FAQS/faq19.html
    # What is the relation between the digital number and the real NDVI ?
    # Real NDVI =coefficient a * Digital Number + coefficient b
    # = a * DN +b
    #
    # Coefficient a = 0.004
    # Coefficient b = -0.1

    # clone current region
    # switch to a temporary region
    grass.use_temp_region()
    grass.run_command('g.region', raster = name, quiet = True)

    grass.message(_("Remapping digital numbers to NDVI..."))
    tmpname = "%s_%s" % (name, pid)
    grass.mapcalc("$tmpname = 0.004 * $name - 0.1", tmpname = tmpname, name = name)
    grass.run_command('g.remove', type = 'raster', name = name, quiet = True, flags = 'f')
    grass.run_command('g.rename', raster = (tmpname, name), quiet = True)

    # write cmd history:
    grass.raster_history(name)

    #apply color table:
    grass.run_command('r.colors', map = name, color = 'ndvi', quiet = True)

    ##########################
    # second, optionally process the SM quality map:

    #SM Status Map
    # http://nieuw.vgt.vito.be/faq/FAQS/faq22.html
    #Data about
    # Bit NR 7: Radiometric quality for B0 coded as 0 if bad and 1 if good
    # Bit NR 6: Radiometric quality for B2 coded as 0 if bad and 1 if good
    # Bit NR 5: Radiometric quality for B3 coded as 0 if bad and 1 if good
    # Bit NR 4: Radiometric quality for MIR coded as 0 if bad and 1 if good
    # Bit NR 3: land code 1 or water code 0
    # Bit NR 2: ice/snow code 1 , code 0 if there is no ice/snow
    # Bit NR 1: 0           0       1               1
    # Bit NR 0: 0           1       0               1
    #           clear  shadow  uncertain  cloud
    #
    #Note:
    # pos 7     6    5    4    3    2   1   0 (bit position)
    #   128    64   32   16    8    4   2   1 (values for 8 bit)
    #
    #
    # Bit 4-7 should be 1: their sum is 240
    # Bit 3   land code, should be 1, sum up to 248 along with higher bits
    # Bit 2   ice/snow code
    # Bit 0-1 should be 0
    #
    # A good map threshold: >= 248

    if also:
        grass.message(_("Importing SPOT VGT NDVI quality map..."))
        grass.try_remove(vrtfile)
        qname = spotname.replace('NDV','SM')
        qfile = os.path.join(spotdir, qname)
        create_VRT_file(projfile, vrtfile, qfile)

        ## let's import the SM quality map...
        smfile = name + '.sm'
        try:
            grass.run_command('r.in.gdal', input=vrtfile, output=smfile)
        except CalledModuleError:
            grass.fatal(_("An error occurred. Stop."))

        # some of the possible values:
        rules = [r + '\n' for r in [
            '8 50 50 50',
            '11 70 70 70',
            '12 90 90 90',
            '60 grey',
            '155 blue',
            '232 violet',
            '235 red',
            '236 brown',
            '248 orange',
            '251 yellow',
            '252 green']]
        grass.write_command('r.colors', map = smfile, rules = '-', stdin = rules)

        grass.message(_("Imported SPOT VEGETATION SM quality map <%s>.") % smfile)
        grass.message(_("Note: A snow map can be extracted by category 252 (d.rast %s cat=252)") % smfile)
        grass.message("")

        grass.message(_("Filtering NDVI map by Status Map quality layer..."))

        # null out cells flagged as cloud/shadow or with bad radiometry
        filtfile = "%s_filt" % name
        grass.mapcalc("$filtfile = if($smfile % 4 == 3 || ($smfile / 16) % 16 == 0, null(), $name)",
                      filtfile = filtfile, smfile = smfile, name = name)
        grass.run_command('r.colors', map = filtfile, color = 'ndvi', quiet = True)

        grass.message(_("Filtered SPOT VEGETATION NDVI map <%s>.") % filtfile)

        # write cmd history:
        grass.raster_history(smfile)
        grass.raster_history(filtfile)

    grass.message(_("Done."))
def main():
    """Compute per-object statistics for a segmented raster map.

    Reads module options/flags (set up by the GRASS parser at file level),
    computes geometry measures via the r.object.geometry addon, zonal raster
    statistics in a multiprocessing pool, and optional neighborhood
    statistics via the r.neighborhoodmatrix addon.  Results are written to a
    CSV file and/or attached as an attribute table to a new vector map.

    Relies on names defined elsewhere in this file: options, flags, worker,
    update, finalize, and the globals insert_sql / temporary_vect /
    stats_temp_file / rasters (shared with a cleanup handler).
    """
    # globals so a cleanup routine can remove temp files/maps on exit
    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None

    segment_map = options['map']
    csvfile = options['csvfile'] if options['csvfile'] else []
    vectormap = options['vectormap'] if options['vectormap'] else []
    global rasters
    rasters = options['rasters'].split(',') if options['rasters'] else []
    area_measures = options['area_measures'].split(',') if (
        options['area_measures'] and not flags['s']) else []
    # geometry measures need the r.object.geometry addon
    if area_measures:
        if not gscript.find_program('r.object.geometry', '--help'):
            message = _(
                "You need to install the addon r.object.geometry to be able")
            message += _(" to calculate area measures.\n")
            message += _(
                " You can install the addon with 'g.extension r.object.geometry'"
            )
            gscript.fatal(message)
    neighborhood = True if flags['n'] else False
    # neighborhood statistics need the r.neighborhoodmatrix addon
    if neighborhood:
        if not gscript.find_program('r.neighborhoodmatrix', '--help'):
            message = _(
                "You need to install the addon r.neighborhoodmatrix to be able"
            )
            message += _(" to calculate area measures.\n")
            message += _(
                " You can install the addon with 'g.extension r.neighborhoodmatrix'"
            )
            gscript.fatal(message)

    raster_statistics = options['raster_statistics'].split(
        ',') if options['raster_statistics'] else []
    separator = gscript.separator(options['separator'])
    processes = int(options['processes'])
    output_header = ['cat']
    output_dict = collections.defaultdict(list)

    # statistic name -> column index in the '|'-separated stats output files
    raster_stat_dict = {
        'zone': 0, 'min': 4, 'third_quart': 16, 'max': 5, 'sum': 12,
        'null_cells': 3, 'median': 15, 'label': 1, 'first_quart': 14,
        'range': 6, 'mean_of_abs': 8, 'stddev': 9, 'non_null_cells': 2,
        'coeff_var': 11, 'variance': 10, 'sum_abs': 13, 'perc_90': 17,
        'mean': 7
    }

    # geometry measure name -> column index in r.object.geometry output
    geometry_stat_dict = {
        'cat': 0, 'area': 1, 'perimeter': 2, 'compact_square': 3,
        'compact_circle': 4, 'fd': 5, 'xcoords': 6, 'ycoords': 7
    }

    if flags['r']:
        # temporarily match the region to the segment map
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=segment_map)

    stats_temp_file = gscript.tempfile()
    if area_measures:
        gscript.message(_("Calculating geometry statistics..."))
        output_header += area_measures
        stat_indices = [geometry_stat_dict[x] for x in area_measures]
        gscript.run_command('r.object.geometry',
                            input_=segment_map,
                            output=stats_temp_file,
                            overwrite=True,
                            quiet=True)

        firstline = True  # skip the header line of the stats file
        with open(stats_temp_file, 'r') as fin:
            for line in fin:
                if firstline:
                    firstline = False
                    continue
                values = line.rstrip().split('|')
                output_dict[values[0]] = [values[x] for x in stat_indices]

    if rasters:
        if not flags['c']:
            # drop rasters that are missing or contain nulls in the region,
            # since nulls would corrupt the per-object statistics
            gscript.message(_("Checking usability of raster maps..."))
            rasters_to_remove = []
            for raster in rasters:
                null_values_found = False
                if not gscript.find_file(raster, element='cell')['name']:
                    gscript.message(_("Cannot find raster '%s'" % raster))
                    gscript.message(_("Removing this raster from list."))
                    rasters_to_remove.append(raster)
                    continue
                current_mapset = gscript.gisenv()['MAPSET']
                if gscript.find_file('MASK',
                                     element='cell',
                                     mapset=current_mapset)['name']:
                    # with a MASK present, '1 *' in r.stats -N output means
                    # null cells exist inside the masked area
                    null_test = gscript.read_command('r.stats',
                                                     flags='N',
                                                     input_=['MASK', raster],
                                                     quiet=True).splitlines()
                    if '1 *' in null_test:
                        null_values_found = True
                else:
                    raster_info = gscript.parse_command('r.univar',
                                                        flags='g',
                                                        map_=raster,
                                                        quiet=True)
                    if len(raster_info) == 0 or int(
                            raster_info['null_cells']) > 0:
                        null_values_found = True
                if null_values_found:
                    message = 'Raster <%s> contains null values.\n' % raster
                    message += 'This can lead to errors in the calculations.\n'
                    message += 'Check region settings and raster extent.\n'
                    message += 'Possibly fill null values of raster.\n'
                    message += 'Removing this raster from list.'
                    gscript.warning(message)
                    rasters_to_remove.append(raster)
            for raster in rasters_to_remove:
                rasters.remove(raster)

        if len(rasters) > 0:
            gscript.message(
                _("Calculating statistics for the following raster maps:"))
            gscript.message(','.join(rasters))
            if len(rasters) < processes:
                processes = len(rasters)
                gscript.message(
                    _("Only one process per raster. Reduced number of processes to %i."
                      % processes))
            stat_indices = [raster_stat_dict[x] for x in raster_statistics]
            # one pool worker per raster; worker() writes one stats file each
            pool = Pool(processes)
            func = partial(worker, segment_map, stats_temp_file)
            pool.map(func, rasters)
            pool.close()
            pool.join()

            for raster in rasters:
                rastername = raster.split('@')[0]
                rastername = rastername.replace('.', '_')
                temp_file = stats_temp_file + '.' + rastername
                output_header += [
                    rastername + "_" + x for x in raster_statistics
                ]
                firstline = True
                with open(temp_file, 'r') as fin:
                    for line in fin:
                        if firstline:
                            firstline = False
                            continue
                        values = line.rstrip().split('|')
                        output_dict[values[0]] = output_dict[values[0]] + [
                            values[x] for x in stat_indices
                        ]

    # Calculating neighborhood statistics if requested
    if neighborhood:
        gscript.message(_("Calculating neighborhood statistics..."))

        # Add neighborhood statistics to headers
        original_nb_values = len(output_header) - 1
        new_headers = ['neighbors_count']
        for i in range(1, len(output_header)):
            new_headers.append('%s_nbrmean' % output_header[i])
            new_headers.append('%s_nbrstddev' % output_header[i])
        output_header += new_headers

        # Get sorted neighborhood matrix (sorted so groupby sees each
        # object's neighbor rows contiguously)
        nbr_matrix = sorted([
            x.split('|')
            for x in gscript.read_command('r.neighborhoodmatrix',
                                          input_=segment_map,
                                          flags='d',
                                          quiet=True).splitlines()
        ])

        # Calculate mean and stddev of neighbor values for each variable in
        # the output_dict; update()/finalize() implement a running-moments
        # accumulator defined elsewhere in this file
        for key, group in groupby(nbr_matrix, lambda x: x[0]):
            d = {}
            for i in range(original_nb_values):
                d[i] = (0, 0, 0)
            nbrlist = [str(x[1]) for x in group]
            if len(nbrlist) > 1:
                for nbr in nbrlist:
                    for i in range(original_nb_values):
                        d[i] = update(d[i], float(output_dict[nbr][i]))
                output_dict[key] = output_dict[key] + [str(len(nbrlist))]
                output_dict[key] = output_dict[key] + [
                    str(i) for sub in [finalize(x) for x in d.values()]
                    for i in sub
                ]
            else:
                # single neighbor: mean is its value, stddev is 0
                newvalues = ['1']
                nbr = nbrlist[0]
                for i in range(original_nb_values):
                    newvalues.append(output_dict[nbr][i])
                    newvalues.append('0')
                output_dict[key] = output_dict[key] + newvalues

    message = _("Some values could not be calculated for the objects below. ")
    message += _("These objects are thus not included in the results. ")
    message += _("HINT: Check some of the raster maps for null values ")
    message += _("and possibly fill these values with r.fillnulls.")

    error_objects = []

    if csvfile:
        with open(csvfile, 'w') as f:
            f.write(separator.join(output_header) + "\n")
            for key in output_dict:
                # only write complete rows; incomplete ones are reported
                if len(output_dict[key]) + 1 == len(output_header):
                    f.write(key + separator +
                            separator.join(output_dict[key]) + "\n")
                else:
                    error_objects.append(key)
            f.close()  # NOTE(review): redundant — the with-block closes f

    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = 'segmstat_tmp_vect_%d' % os.getpid()
        gscript.run_command('r.to.vect',
                            input_=segment_map,
                            output=temporary_vect,
                            type_='area',
                            flags='vt',
                            overwrite=True,
                            quiet=True)

        # assemble the whole attribute table as one SQL transaction and
        # execute it in a single db.execute call
        insert_sql = gscript.tempfile()
        fsql = open(insert_sql, 'w')
        fsql.write('BEGIN TRANSACTION;\n')
        if gscript.db_table_exist(temporary_vect):
            if gscript.overwrite():
                fsql.write('DROP TABLE %s;' % temporary_vect)
            else:
                gscript.fatal(
                    _("Table %s already exists. Use --o to overwrite" %
                      temporary_vect))
        create_statement = 'CREATE TABLE ' + temporary_vect + ' (cat int PRIMARY KEY);\n'
        fsql.write(create_statement)
        for header in output_header[1:]:
            addcol_statement = 'ALTER TABLE %s ADD COLUMN %s double precision;\n' % (
                temporary_vect, header)
            fsql.write(addcol_statement)
        for key in output_dict:
            if len(output_dict[key]) + 1 == len(output_header):
                sql = "INSERT INTO %s VALUES (%s, %s);\n" % (
                    temporary_vect, key, ",".join(output_dict[key]))
                # 'inf'/'nan' are not valid SQL literals
                sql = sql.replace('inf', 'NULL')
                sql = sql.replace('nan', 'NULL')
                fsql.write(sql)
            else:
                if not csvfile:
                    error_objects.append(key)

        fsql.write('END TRANSACTION;')
        fsql.close()

        gscript.run_command('db.execute', input=insert_sql, quiet=True)
        gscript.run_command('v.db.connect',
                            map_=temporary_vect,
                            table=temporary_vect,
                            quiet=True)
        gscript.run_command('g.copy',
                            vector="%s,%s" % (temporary_vect, vectormap),
                            quiet=True)

    if error_objects:
        object_string = ', '.join(error_objects[:100])
        message += _(
            "\n\nObjects with errors (only first 100 are shown):\n%s" %
            object_string)
        gscript.message(message)
def main():
    """Characterize habitat polygons against a DEM (v.habitat.dem pipeline).

    Linear pipeline: saves/extends the region, derives terrain parameters
    (r.watershed, r.slope.aspect), uploads zonal statistics to the habitat
    vector's attribute table, cross-tabulates habitats with geomorphons,
    computes an LS erosion factor and hourly solar radiation, then restores
    the region and cleans up temporary maps/tables.

    Uses the module-level `options` dict (from the GRASS parser), `grass`
    (grass.script) and `csv`/`os` imports defined at file level.
    """
    # input options
    r_elevation = options['elevation'].split('@')[0]
    v_habitat = options['vector'].split('@')[0]
    v_column = options['column']
    regext = options['region_extension']
    directory = options['dir']
    prefix = options['prefix']
    # names of all rasters/files/tables derived below, built from `prefix`
    r_accumulation = prefix + '_accumulation'
    r_drainage = prefix + '_drainage'
    r_tci = prefix + '_topographicindex'
    r_slope = prefix + '_slope'
    r_aspect = prefix + '_aspect'
    r_flow_accum = prefix + '_flow_accumulation'
    r_LS = prefix + '_LS_factor'
    r_habitat = prefix + '_' + v_habitat + '_rast'
    r_geomorphon = prefix + '_' + r_elevation + '_geomorphon'
    r_beam_rad = prefix + '_beam_rad'
    f_habitat_small_areas = prefix + '_small_areas.csv'
    f_habitat_geomorphons = prefix + '_habitat_geomorphons.csv'
    f_habitat_geomorphons_pivot = prefix + '_habitat_geomorphons_pivot.csv'
    f_LS_colorrules = prefix + '_LS_color.txt'
    t_habitat_geomorphons = prefix + '_habgeo_tab'
    t_habitat_geomorphons_pivot = prefix + '_habgeo_tab_pivot'
    d_start_time = options['start_time']
    d_end_time = options['end_time']
    d_time_step = options['time_step']  # NOTE(review): never used below — r.sun.hourly is called without it; confirm intent
    d_day = options['day']
    d_year = options['year']
    saved_region = prefix + '_region_ori'
    current_region = grass.region()
    X = float(regext)
    N = current_region["n"]
    S = current_region["s"]
    E = current_region["e"]
    W = current_region["w"]
    Yres = current_region['nsres']
    Xres = current_region['ewres']
    PixelArea = Xres * Yres
    global tmp

    ## check if r.geomorphon addon is installed
    if not grass.find_program('r.geomorphon', '--help'):
        grass.fatal(
            _("The 'r.geomorphon' addon was not found, install it first:") +
            "\n" + "g.extension r.geomorphon")

    ## check if r.sun.hourly addon is installed
    if not grass.find_program('r.sun.hourly', '--help'):
        grass.fatal(
            _("The 'r.sun.hourly' addon was not found, install it first:") +
            "\n" + "g.extension r.sun.hourly")

    # Region settings
    grass.message("Current region will be saved and extended.")
    grass.message("----")

    # Print and save current region
    grass.message("Current region:")
    grass.message("n, s, e, w")
    grass.message([current_region[key] for key in "nsew"])
    grass.run_command('g.region', save=saved_region, overwrite=True)
    grass.message("Current region saved.")
    grass.message("----")

    # does vector map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    exists = bool(
        grass.find_file(v_habitat, element='vector', mapset=mapset)['file'])
    if not exists:
        grass.fatal(
            _("Vector map <%s> not found in current mapset") % v_habitat)

    # Align region to elevation raster and habitat vector
    # NOTE(review): 'rast'/'vect' are deprecated g.region option names
    # ('raster'/'vector' in GRASS 7+) — still accepted, but consider updating
    grass.message("Align region to elevation raster and habitat vector ...")
    grass.run_command('g.region',
                      flags='a',
                      rast=r_elevation,
                      vect=v_habitat,
                      align=r_elevation)
    grass.message("Alignment done.")
    aligned_region = grass.region()
    Naligned = aligned_region["n"]
    Saligned = aligned_region["s"]
    Ealigned = aligned_region["e"]
    Waligned = aligned_region["w"]
    grass.message("Aligned region:")
    grass.message("n, s, e, w")
    grass.message([aligned_region[key] for key in "nsew"])
    grass.message("----")

    # Extend region by `regext` map units in all four directions
    grass.message("Extend region by")
    grass.message(regext)
    grass.message("in all directions")
    grass.run_command('g.region',
                      n=Naligned + X,
                      s=Saligned - X,
                      e=Ealigned + X,
                      w=Waligned - X)
    grass.message("Region extension done.")
    extended_region = grass.region()
    grass.message("Extended region:")
    grass.message("n, s, e, w")
    grass.message([extended_region[key] for key in "nsew"])
    grass.message("----")

    # Watershed calculation: accumulation, drainage direction, topographic index
    grass.message(
        "Calculation of accumulation, drainage direction, topographic index by r.watershed ..."
    )
    grass.run_command('r.watershed',
                      elevation=r_elevation,
                      accumulation=r_accumulation,
                      drainage=r_drainage,
                      tci=r_tci,
                      convergence=5,
                      flags='am',
                      overwrite=True)
    grass.message(
        "Calculation of accumulation, drainage direction, topographic done.")
    grass.message("----")

    # Calculation of slope and aspect maps
    grass.message("Calculation of slope and aspect by r.slope.aspect ...")
    grass.run_command('r.slope.aspect',
                      elevation=r_elevation,
                      slope=r_slope,
                      aspect=r_aspect,
                      overwrite=True)
    grass.message("Calculation of slope and aspect done.")
    grass.message("----")

    # Calculate pixel area by nsres x ewres
    grass.message("Pixel area:")
    grass.message(PixelArea)
    grass.message("----")

    # Calculate habitat area and populate it to the attribute table
    grass.message(
        "Calculate habitat's areas and populate it to the attribute table ...")
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="habarea double")
    grass.run_command("v.to.db",
                      map=v_habitat,
                      option='area',
                      layer=1,
                      columns='habarea',
                      overwrite=True)
    grass.message("Calculate habitat's areas done.")
    grass.message("----")

    # Show habitat areas smaller than Pixel Area (they cannot be represented
    # reliably at the current resolution)
    grass.message("Habitat areas smaller than pixel area.")
    grass.run_command("v.db.select",
                      map=v_habitat,
                      flags='v',
                      layer=1,
                      columns=v_column,
                      where="habarea < %s" % (PixelArea))
    smallareacsv = os.path.join(directory, f_habitat_small_areas)
    grass.run_command("v.db.select",
                      map=v_habitat,
                      flags='v',
                      layer=1,
                      columns=v_column,
                      file=smallareacsv,
                      where="habarea < %s" % (PixelArea))
    grass.message(
        "A list of habitat areas smaller than pixel area can be found in: ")
    grass.message(smallareacsv)
    grass.message("----")

    # Mark habitats smaller than pixel area in attribute table
    grass.message(
        "Mark habitats smaller than pixel area in attribute table ...")
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_smallarea varchar(1)" % prefix)
    grass.run_command("v.db.update",
                      map=v_habitat,
                      layer=1,
                      column='%s_smallarea' % (prefix),
                      value='*',
                      where='habarea < %s' % (PixelArea))
    grass.message("See column")
    grass.message('%s_smallarea' % prefix)
    grass.message("marked by *.")
    grass.message("----")

    # Upload DEM zonal statistics to the attribute table
    grass.message("Upload DEM zonal statistics to the attribute table ...")
    grass.run_command("v.rast.stats",
                      map=v_habitat,
                      flags='c',
                      layer=1,
                      raster=r_elevation,
                      column_prefix=prefix + '_dem',
                      method='minimum,maximum,range,average,median')
    grass.message("Upload DEM zonal statistics done.")
    grass.message("----")

    # Upload slope zonal statistics to the attribute table
    grass.message("Upload slope zonal statistics to the attribute table ...")
    grass.run_command("v.rast.stats",
                      map=v_habitat,
                      flags='c',
                      layer=1,
                      raster=r_slope,
                      column_prefix=prefix + '_slope',
                      method='minimum,maximum,range,average,median')
    grass.message("Upload slope zonal statistics done.")
    grass.message("----")

    # Upload aspect zonal statistics to the attribute table
    grass.message("Upload aspect zonal statistics to the attribute table ...")
    grass.run_command("v.rast.stats",
                      map=v_habitat,
                      flags='c',
                      layer=1,
                      raster=r_aspect,
                      column_prefix=prefix + '_aspect',
                      method='minimum,maximum,range,average,median')
    grass.message("Upload aspect zonal statistics done.")
    grass.message("----")

    # Do some simple checks regarding aspect range; star rating written into
    # a new varchar column (*, **, ***)
    grass.message(
        "Do some simple checks regarding aspect range and populate it to the attribute table..."
    )
    grass.message("aspect range 100-200 *")
    grass.message("aspect range 201-300 **")
    grass.message("aspect range >= 300 ***")
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s varchar(3)" %
                      (prefix + '_check_aspect_range'))
    grass.run_command(
        "db.execute",
        sql="UPDATE %s SET %s ='*' WHERE %s < 200 AND %s >= 100" %
        (v_habitat, prefix + '_check_aspect_range', prefix + '_aspect_range',
         prefix + '_aspect_range'))
    grass.run_command(
        "db.execute",
        sql="UPDATE %s SET %s ='**' WHERE %s < 300 AND %s >= 200" %
        (v_habitat, prefix + '_check_aspect_range', prefix + '_aspect_range',
         prefix + '_aspect_range'))
    grass.run_command(
        "db.execute",
        sql="UPDATE %s SET %s ='***' WHERE %s >= 300" %
        (v_habitat, prefix + '_check_aspect_range',
         prefix + '_aspect_range'))
    grass.message("Simple checks regarding aspect range done.")
    grass.message("----")

    # Do some simple checks regarding aspect range combined with median slope
    grass.message(
        "Do some simple checks regarding aspect range and slope median and populate it to the attribute table..."
    )
    grass.message("aspect range 100-200 and median slope < 5 *")
    grass.message("aspect range 201-300 and median slope < 5 **")
    grass.message("aspect range >= 300 and median slope < 5 ***")
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s varchar(3)" %
                      (prefix + '_check_aspect_slope'))
    grass.run_command(
        "db.execute",
        sql="UPDATE %s SET %s ='*' WHERE (%s < 200 AND %s >= 100) AND %s < 5"
        % (v_habitat, prefix + '_check_aspect_slope',
           prefix + '_aspect_range', prefix + '_aspect_range',
           prefix + '_slope_median'))
    grass.run_command(
        "db.execute",
        sql="UPDATE %s SET %s ='**' WHERE (%s < 300 AND %s >= 200) AND %s < 5"
        % (v_habitat, prefix + '_check_aspect_slope',
           prefix + '_aspect_range', prefix + '_aspect_range',
           prefix + '_slope_median'))
    grass.run_command(
        "db.execute",
        sql="UPDATE %s SET %s ='***' WHERE %s >= 300 AND %s < 5" %
        (v_habitat, prefix + '_check_aspect_slope', prefix + '_aspect_range',
         prefix + '_slope_median'))
    grass.message(
        "Simple checks regarding aspect range and median slope done.")
    grass.message("----")

    # Convert habitat vector to raster (needed for r.coin / r.stats below)
    grass.message("Convert habitat vector to raster ...")
    grass.run_command("v.to.rast",
                      input=v_habitat,
                      layer=1,
                      type='area',
                      use='attr',
                      attrcolumn=v_column,
                      output=r_habitat)
    grass.message("Conversion done.")
    grass.message("----")

    # Calculate the most common geomorphons
    grass.message("Calculate the most common geomorphons ...")
    grass.run_command("r.geomorphon",
                      elevation=r_elevation,
                      skip=0,
                      search=3,
                      flat=1,
                      dist=0,
                      forms=r_geomorphon)
    grass.message("Geomorphon calculations done.")
    grass.message("----")

    # Mutual occurrence (coincidence) of categories of habitats and geomorphons
    grass.message(
        "Calculate mutual occurrences of habitats and geomorphons ...")
    grass.message("1 - flat")
    grass.message("2 - summit")
    grass.message("3 - ridge")
    grass.message("4 - shoulder")
    grass.message("5 - spur")
    grass.message("6 - slope")
    grass.message("7 - hollow")
    grass.message("8 - footslope")
    grass.message("9 - valley")
    grass.message("10 - depression")
    grass.message(" ")
    grass.message("Mutual occurrence in percent of the row")
    grass.run_command("r.coin",
                      first=r_habitat,
                      second=r_geomorphon,
                      flags='w',
                      units='y')
    grass.message("Calculations of mutual occurrences done.")
    grass.message("----")

    # Join geomorphons to habitat attribute table: export r.stats output to
    # CSV, import it as a DB table, pivot it per habitat, join the pivot
    grass.message(
        "Join geomorphon information to habitat attribute table ....")
    habgeocsv = os.path.join(directory, f_habitat_geomorphons)
    grass.run_command("r.stats",
                      input=[r_habitat, r_geomorphon],
                      flags='aln',
                      separator=';',
                      output=habgeocsv)
    grass.run_command("db.in.ogr",
                      input=habgeocsv,
                      output=t_habitat_geomorphons)
    grass.run_command("db.dropcolumn",
                      table=t_habitat_geomorphons,
                      column='field_2',
                      flags='f')
    grass.run_command("db.dropcolumn",
                      table=t_habitat_geomorphons,
                      column='field_3',
                      flags='f')
    habgeocsv_pivot = os.path.join(directory, f_habitat_geomorphons_pivot)
    # pivot: one row per habitat (field_1), one column per geomorphon form
    grass.run_command(
        "db.select",
        separator=';',
        output=habgeocsv_pivot,
        sql=
        "SELECT field_1, sum(case when field_4 = 'flat' then field_5 end) as flat, sum(case when field_4 = 'summit' then field_5 end) as summit, sum(case when field_4 = 'ridge' then field_5 end) as ridge, sum(case when field_4 = 'shoulder' then field_5 end) as shoulder, sum(case when field_4 = 'spur' then field_5 end) as spur, sum(case when field_4 = 'slope' then field_5 end) as slope, sum(case when field_4 = 'hollow' then field_5 end) as hollow, sum(case when field_4 = 'footslope' then field_5 end) as footslope, sum(case when field_4 = 'valley' then field_5 end) as valley, sum(case when field_4 = 'depression' then field_5 end) as depression , sum(field_5) as SubTotal FROM %s GROUP BY field_1"
        % t_habitat_geomorphons)
    grass.run_command("db.in.ogr",
                      input=habgeocsv_pivot,
                      output=t_habitat_geomorphons_pivot)
    grass.run_command("v.db.join",
                      map=v_habitat,
                      column=v_column,
                      other_table=t_habitat_geomorphons_pivot,
                      other_column='field_1')
    grass.message("Geomorphon information joint to habitat attribute table.")
    grass.message("...")
    grass.message("Calculating percent geomorphon of habitat area ...")

    # add column for percent geomorphon (one per geomorphon form)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_flat double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_summit double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_ridge double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_shoulder double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_spur double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_slope double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_hollow double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_footslope double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_valley double precision" % prefix)
    grass.run_command("v.db.addcolumn",
                      map=v_habitat,
                      layer=1,
                      columns="%s_perc_depression double precision" % prefix)

    # calculate percent geomorphon (form area / SubTotal * 100)
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_flat" % prefix,
        query_column="cast(flat AS real) / cast( SubTotal AS real) * 100.0")
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_summit" % prefix,
        query_column="cast(summit AS real) / cast( SubTotal AS real) * 100.0")
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_ridge" % prefix,
        query_column="cast(ridge AS real) / cast( SubTotal AS real) * 100.0")
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_shoulder" % prefix,
        query_column="cast(shoulder AS real) / cast( SubTotal AS real) * 100.0"
    )
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_spur" % prefix,
        query_column="cast(spur AS real) / cast( SubTotal AS real) * 100.0")
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_slope" % prefix,
        query_column="cast(slope AS real) / cast( SubTotal AS real) * 100.0")
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_hollow" % prefix,
        query_column="cast(hollow AS real) / cast( SubTotal AS real) * 100.0")
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_footslope" % prefix,
        query_column="cast(footslope AS real) / cast( SubTotal AS real) * 100.0"
    )
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_valley" % prefix,
        query_column="cast(valley AS real) / cast( SubTotal AS real) * 100.0")
    grass.run_command(
        "v.db.update",
        map=v_habitat,
        layer=1,
        column="%s_perc_depression" % prefix,
        query_column=
        "cast(depression AS real) / cast( SubTotal AS real) * 100.0")
    grass.message(" ")
    grass.message("Calculating of percent geomorphon of habitat area done.")
    grass.message("----")

    # Give information where output files are
    grass.message("Geomorphon information:")
    grass.message(habgeocsv)
    grass.message("Geomorphon information in pivot format:")
    grass.message(habgeocsv_pivot)
    grass.message("----")

    # Calculate LS factor, see Neteler & Mitasova 2008:
    # Open Source GIS - A GRASS GIS Approach
    grass.message("Calculate LS factor ...")
    grass.run_command("r.flow",
                      elevation=r_elevation,
                      aspect=r_aspect,
                      flowaccumulation=r_flow_accum)
    grass.message("...")
    grass.mapcalc(
        "$outmap = 1.4 * exp($flowacc * $resolution / 22.1, 0.4) * exp(sin($slope) / 0.09, 1.2)",
        outmap=r_LS,
        flowacc=r_flow_accum,
        resolution=Xres,
        slope=r_slope)

    # create and define color rules file for LS factor map
    ls_color_rules_out = os.path.join(directory, f_LS_colorrules)
    with open(ls_color_rules_out, 'w') as f:
        writer = csv.writer(f)
        writer.writerow(['0 white'])
        writer.writerow(['3 yellow'])
        writer.writerow(['6 orange'])
        writer.writerow(['10 red'])
        writer.writerow(['50 magenta'])
        writer.writerow(['100 violet'])
    grass.run_command("r.colors", map=r_LS, rules=ls_color_rules_out)
    grass.message("Calculation LS factor done.")
    grass.message("----")

    # Run r.sun.hourly in binary mode for light/shadow
    grass.message(
        "Run r.sun.hourly in binary mode for light/shadow for a certain day in the year ..."
    )
    grass.run_command("r.sun.hourly",
                      elevation=r_elevation,
                      flags='tb',
                      aspect=r_aspect,
                      slope=r_slope,
                      start_time=d_start_time,
                      end_time=d_end_time,
                      day=d_day,
                      year=d_year,
                      beam_rad_basename=r_beam_rad)
    grass.message("----")
    grass.message("Light/shadow conditions calculated for year")
    grass.message(d_year)
    grass.message("and day")
    grass.message(d_day)
    grass.message('from')
    grass.message(d_start_time)
    grass.message('to')
    grass.message(d_end_time)
    grass.message('done.')
    grass.message("----")
    grass.run_command("t.info", flags='h', input=r_beam_rad)
    grass.message("----")

    # Set region to original
    grass.message("Restore original region settings:")
    grass.run_command("g.region", flags='p', region=saved_region)
    grass.message("----")

    # clean up some temporary files and maps
    grass.message("Some clean up ...")
    grass.run_command("g.remove", flags="f", type="region", name=saved_region)
    grass.run_command("g.remove", flags="f", type="raster", name=r_flow_accum)
    grass.run_command("g.remove", flags="f", type="raster", name=r_habitat)
    grass.run_command("db.droptable", flags='f', table=t_habitat_geomorphons)
    grass.run_command("db.droptable",
                      flags='f',
                      table=t_habitat_geomorphons_pivot)
    grass.run_command("db.dropcolumn",
                      flags='f',
                      table=v_habitat,
                      column='field_1')
    grass.message("Clean up done.")
    grass.message("----")

    # v.habitat.dem done!
    grass.message("v.habitat.dem done!")
def main():
    """Check how much of a vector area is covered by Sentinel scenes.

    Queries the ESA hub via i.sentinel.download (list mode) for scene
    footprints intersecting `area`, dissolves the footprints that fall
    inside the area and reports the covered percentage.  Optionally fails
    when coverage is below `minpercent`, and writes the matching scene
    names to `output`.

    Uses module-level `options`, the cleanup lists rm_regions / rm_rasters /
    rm_vectors, and the helpers scenename_split() / get_size() defined
    elsewhere in this file.
    """
    global rm_regions, rm_rasters, rm_vectors

    ### check if the i.sentinel.download addon is installed
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.download' module was not found, install it first:")
                    + "\n"
                    + "g.extension i.sentinel")

    # parameters
    settings = options['settings']
    output = options['output']
    area = options['area']
    if not grass.find_file(area, element='vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % area)
    producttype = options['producttype']

    grass.message(_("Retrieving Sentinel footprints from ESA hub ..."))
    fps = 'tmp_fps_%s' % str(os.getpid())
    rm_vectors.append(fps)
    if not options['names']:
        # query by filter options; -l lists only, -b gets footprints
        s_list = grass.parse_command(
            'i.sentinel.download',
            settings=settings,
            map=area,
            clouds=options['clouds'],
            producttype=producttype,
            start=options['start'],
            end=options['end'],
            footprints=fps,
            flags='lb',
            quiet=True)
        if not s_list:
            grass.fatal('No products found')
        name_list = [x.split(' ')[1] for x in s_list]
    else:
        # query each named scene individually and patch the footprints
        name_list = []
        fp_list = []
        for name in options['names'].split(','):
            real_producttype, start_day, end_day = scenename_split(name)
            if real_producttype != producttype:
                # BUGFIX: the original error message was truncated
                # ("Producttype of ") and gave the user no information
                grass.fatal(_("Producttype of scene <%s> is <%s>, but <%s> was requested")
                            % (name, real_producttype, producttype))
            fpi = 'tmp_fps_%s_%s' % (name, str(os.getpid()))
            try:
                grass.run_command(
                    'i.sentinel.download',
                    settings=settings,
                    map=area,
                    producttype=producttype,
                    footprints=fpi,
                    start=start_day,
                    end=end_day,
                    flags='bl',
                    quiet=True)
                name_list.append(name)
                fp_list.append(fpi)
                rm_vectors.append(fpi)
            except Exception:
                # BUGFIX: was a bare `except:` which also swallows
                # KeyboardInterrupt/SystemExit
                grass.warning('%s was not found in %s' % (name, area))
        # robustness: v.patch with an empty input list would fail cryptically
        if not fp_list:
            grass.fatal(_("None of the requested scenes were found in <%s>") % area)
        grass.run_command(
            'v.patch', input=','.join(fp_list), output=fps, quiet=True)

    grass.message(_("Getting size of <%s> area ...") % area)
    areasize = get_size(area)

    grass.message(_("Getting size of footprints in area <%s> ...") % area)
    fps_in_area = 'tmp_fps_in_area_%s' % str(os.getpid())
    rm_vectors.append(fps_in_area)
    # clip footprints to the area of interest
    grass.run_command(
        'v.overlay',
        ainput=fps,
        atype='area',
        binput=area,
        operator='and',
        output=fps_in_area,
        quiet=True)
    grass.run_command(
        'v.db.addcolumn', map=fps_in_area, columns="tmp INTEGER", quiet=True)
    grass.run_command(
        'v.db.update', map=fps_in_area, column='tmp', value=1, quiet=True)
    # list of scenes that actually intersect with bbox
    name_list_updated = list(grass.parse_command(
        'v.db.select',
        map=fps_in_area,
        column='a_identifier',
        flags='c').keys())

    # dissolve overlapping footprints so area is not counted twice
    fps_in_area_dis = 'tmp_fps_in_area_dis_%s' % str(os.getpid())
    rm_vectors.append(fps_in_area_dis)
    grass.run_command(
        'v.dissolve',
        input=fps_in_area,
        output=fps_in_area_dis,
        column='tmp',
        quiet=True)
    grass.run_command(
        'v.db.addtable', map=fps_in_area_dis, quiet=True)
    fpsize = get_size(fps_in_area_dis)

    percent = fpsize / areasize * 100.0
    grass.message(_("%.2f percent of the area <%s> is covered") % (percent, area))

    if options['minpercent']:
        # BUGFIX/generalization: int() rejected fractional thresholds
        # such as "80.5"; float() accepts both
        if percent < float(options['minpercent']):
            grass.fatal("The percentage of coverage is too low (expected: %s)"
                        % options['minpercent'])

    # save list of Sentinel names
    if output:
        with open(output, 'w') as f:
            f.write(','.join(name_list_updated))
        grass.message(_(
            "Name of Sentinel scenes are written to file <%s>") % (output))
def main():
    """Import a SPOT VEGETATION NDVI HDF product into GRASS.

    Builds a VRT around the HDF file, imports the NDVI band with r.in.gdal,
    rescales digital numbers to real NDVI (NDVI = 0.004 * DN - 0.1) and,
    with the -a flag, also imports the SM status/quality map and derives a
    quality-filtered NDVI map.

    Uses module-level `options`/`flags` and the helper create_VRT_file()
    defined elsewhere in this file; `vrtfile`/`tmpfile` are globals so a
    cleanup handler can remove them.
    """
    global vrtfile, tmpfile

    infile = options['input']
    rast = options['output']
    also = flags['a']

    #### check for gdalinfo (just to check if installation is complete)
    if not grass.find_program('gdalinfo', '--help'):
        grass.fatal(
            _("'gdalinfo' not found, install GDAL tools first (http://www.gdal.org)"
              ))

    pid = str(os.getpid())
    tmpfile = grass.tempfile()

    ################### let's go
    spotdir = os.path.dirname(infile)
    spotname = grass.basename(infile, 'hdf')
    if rast:
        name = rast
    else:
        name = spotname
    if not grass.overwrite() and grass.find_file(name)['file']:
        grass.fatal(_("<%s> already exists. Aborting.") % name)

    # still a ZIP file? (is this portable?? see the r.in.srtm script for ideas)
    if infile.lower().endswith('.zip'):
        grass.fatal(_("Please extract %s before import.") % infile)

    try:
        p = grass.Popen(['file', '-ib', infile], stdout=grass.PIPE)
        s = p.communicate()[0]
        # BUGFIX: under Python 3 communicate() returns bytes, so comparing
        # against a str literal could never match; decode first
        if isinstance(s, bytes):
            s = s.decode(errors='replace')
        if s.strip() == "application/x-zip":
            grass.fatal(_("Please extract %s before import.") % infile)
    except Exception:
        # best-effort check only: 'file' may be missing (e.g. on Windows);
        # BUGFIX: was a bare `except:` which also swallows SystemExit raised
        # by grass.fatal above — Exception keeps the fatal working
        pass

    ### create VRT header for NDVI
    projfile = os.path.join(spotdir, "0001_LOG.TXT")
    vrtfile = tmpfile + '.vrt'

    # first process the NDVI:
    grass.try_remove(vrtfile)
    create_VRT_file(projfile, vrtfile, infile)

    ## let's import the NDVI map...
    grass.message(_("Importing SPOT VGT NDVI map..."))
    # BUGFIX + consistency: run_command raises CalledModuleError on failure
    # instead of returning a non-zero code; use try/except like the other
    # import code in this file
    try:
        grass.run_command('r.in.gdal', input=vrtfile, output=name)
    except CalledModuleError:
        grass.fatal(_("An error occurred. Stop."))

    grass.message(_("Imported SPOT VEGETATION NDVI map <%s>.") % name)

    #################
    ## http://www.vgt.vito.be/faq/FAQS/faq19.html
    # What is the relation between the digital number and the real NDVI ?
    # Real NDVI =coefficient a * Digital Number + coefficient b
    #          = a * DN +b
    #
    # Coefficient a = 0.004
    # Coefficient b = -0.1

    # clone current region
    # switch to a temporary region
    grass.use_temp_region()
    grass.run_command('g.region', rast=name, quiet=True)

    grass.message(_("Remapping digital numbers to NDVI..."))
    tmpname = "%s_%s" % (name, pid)
    grass.mapcalc("$tmpname = 0.004 * $name - 0.1", tmpname=tmpname, name=name)
    grass.run_command('g.remove', flags='f', type='rast',
                      pattern=name, quiet=True)
    grass.run_command('g.rename', rast=(tmpname, name), quiet=True)

    # write cmd history:
    grass.raster_history(name)

    #apply color table:
    grass.run_command('r.colors', map=name, color='ndvi', quiet=True)

    ##########################
    # second, optionally process the SM quality map:

    #SM Status Map
    # http://nieuw.vgt.vito.be/faq/FAQS/faq22.html
    #Data about
    # Bit NR 7: Radiometric quality for B0 coded as 0 if bad and 1 if good
    # Bit NR 6: Radiometric quality for B2 coded as 0 if bad and 1 if good
    # Bit NR 5: Radiometric quality for B3 coded as 0 if bad and 1 if good
    # Bit NR 4: Radiometric quality for MIR coded as 0 if bad and 1 if good
    # Bit NR 3: land code 1 or water code 0
    # Bit NR 2: ice/snow code 1 , code 0 if there is no ice/snow
    # Bit NR 1:	0	0	1	1
    # Bit NR 0:	0	1	0	1
    # 		clear	shadow	uncertain	cloud
    #
    #Note:
    # pos 7     6    5    4    3    2   1   0 (bit position)
    #   128    64   32   16    8    4   2   1 (values for 8 bit)
    #
    #
    # Bit 4-7 should be 1: their sum is 240
    # Bit 3   land code, should be 1, sum up to 248 along with higher bits
    # Bit 2   ice/snow code
    # Bit 0-1 should be 0
    #
    # A good map threshold: >= 248
    if also:
        grass.message(_("Importing SPOT VGT NDVI quality map..."))
        grass.try_remove(vrtfile)
        qname = spotname.replace('NDV', 'SM')
        qfile = os.path.join(spotdir, qname)
        create_VRT_file(projfile, vrtfile, qfile)

        ## let's import the SM quality map...
        smfile = name + '.sm'
        # BUGFIX: same `!= 0` anti-pattern as above
        try:
            grass.run_command('r.in.gdal', input=vrtfile, output=smfile)
        except CalledModuleError:
            grass.fatal(_("An error occurred. Stop."))

        # some of the possible values:
        rules = [
            r + '\n' for r in [
                '8 50 50 50', '11 70 70 70', '12 90 90 90', '60 grey',
                '155 blue', '232 violet', '235 red', '236 brown',
                '248 orange', '251 yellow', '252 green'
            ]
        ]
        grass.write_command('r.colors', map=smfile, rules='-', stdin=rules)

        grass.message(
            _("Imported SPOT VEGETATION SM quality map <%s>.") % smfile)
        grass.message(
            _("Note: A snow map can be extracted by category 252 (d.rast %s cat=252)"
              ) % smfile)
        grass.message("")
        grass.message(_("Filtering NDVI map by Status Map quality layer..."))

        filtfile = "%s_filt" % name
        grass.mapcalc(
            "$filtfile = if($smfile % 4 == 3 || ($smfile / 16) % 16 == 0, null(), $name)",
            filtfile=filtfile,
            smfile=smfile,
            name=name)
        grass.run_command('r.colors', map=filtfile, color='ndvi', quiet=True)
        grass.message(_("Filtered SPOT VEGETATION NDVI map <%s>.") % filtfile)

        # write cmd history:
        grass.raster_history(smfile)
        grass.raster_history(filtfile)

    grass.message(_("Done."))
def main():
    """Download Sentinel-2 scenes, import them in parallel worker mapsets,
    copy the results into the current mapset and optionally register them
    in space-time datasets (STRDS/STVDS), one per band.

    Reads module `options`/`flags` set by the GRASS parser; returns None.
    Aborts via grass.fatal() on missing addons or unusable directories.
    """
    global rm_regions, rm_rasters, rm_vectors, tmpfolder

    # parameters
    s2names = options['s2names'].split(',')
    tmpdirectory = options['directory']

    test_nprocs_memory()

    grass.message(_("Downloading Sentinel scenes ..."))
    # all three i.sentinel.* addons are required before any work starts
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.download' module was not found, install it first:") +
                    "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.import', '--help'):
        grass.fatal(_("The 'i.sentinel.import' module was not found, install it first:") +
                    "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.parallel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.parallel.download' module was not found, install it first:") +
                    "\n" + "g.extension i.sentinel")

    # create temporary directory to download data
    if tmpdirectory:
        if not os.path.isdir(tmpdirectory):
            try:
                os.makedirs(tmpdirectory)
            except OSError:
                grass.fatal(_("Unable to create temp dir"))
    else:
        tmpdirectory = grass.tempdir()
        tmpfolder = tmpdirectory

    # scene names may be given either directly or as a text file
    if os.path.isfile(s2names[0]):
        with open(s2names[0], 'r') as f:
            s2namesstr = f.read()
    else:
        s2namesstr = ','.join(s2names)

    grass.run_command(
        'i.sentinel.parallel.download',
        settings=options['settings'],
        scene_name=s2namesstr,
        nprocs=options['nprocs'],
        output=tmpdirectory,
        flags="fs",
        quiet=True)

    grass.message(_("Importing Sentinel scenes ..."))
    env = grass.gisenv()
    start_gisdbase = env['GISDBASE']
    start_location = env['LOCATION_NAME']
    start_cur_mapset = env['MAPSET']

    # never start more import workers than there are scenes
    if len(s2namesstr.split(',')) < int(options['nprocs']):
        procs_import = len(s2namesstr.split(','))
    else:
        procs_import = int(options['nprocs'])

    # save current region so the workers can reproduce it
    id = str(os.getpid())
    currentregion = 'tmp_region_' + id
    grass.run_command('g.region', save=currentregion, flags='p')

    queue_import = ParallelModuleQueue(nprocs=procs_import)
    memory_per_proc = round(float(options['memory']) / procs_import)
    mapsetids = []
    importflag = 'r'
    if flags['i']:
        importflag += 'i'
    if flags['c']:
        importflag += 'c'
    json_standard_folder = os.path.join(
        env['GISDBASE'], env['LOCATION_NAME'], env['MAPSET'], 'cell_misc')
    if not os.path.isdir(json_standard_folder):
        os.makedirs(json_standard_folder)

    # one worker mapset per downloaded scene folder
    for idx, subfolder in enumerate(os.listdir(tmpdirectory)):
        if os.path.isdir(os.path.join(tmpdirectory, subfolder)):
            mapsetid = 'S2_import_%s' % (str(idx + 1))
            mapsetids.append(mapsetid)
            directory = os.path.join(tmpdirectory, subfolder)
            i_sentinel_import = Module(
                'i.sentinel.import.worker',
                input=directory,
                mapsetid=mapsetid,
                memory=memory_per_proc,
                pattern=options['pattern'],
                flags=importflag,
                region=currentregion,
                metadata=json_standard_folder,
                run_=False
            )
            queue_import.put(i_sentinel_import)
    queue_import.wait()
    grass.run_command('g.remove', type='region', name=currentregion, flags='f')

    # verify that switching the mapset worked
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    cur_mapset = env['MAPSET']
    if cur_mapset != start_cur_mapset:
        grass.fatal("New mapset is <%s>, but should be <%s>" % (cur_mapset, start_cur_mapset))

    # copy maps to current mapset, then drop the worker mapsets
    maplist = []
    cloudlist = []
    for new_mapset in mapsetids:
        for vect in grass.parse_command('g.list', type='vector', mapset=new_mapset):
            cloudlist.append(vect)
            grass.run_command('g.copy', vector=vect + '@' + new_mapset + ',' + vect)
        for rast in grass.parse_command('g.list', type='raster', mapset=new_mapset):
            maplist.append(rast)
            grass.run_command('g.copy', raster=rast + '@' + new_mapset + ',' + rast)
            # set nulls
            grass.run_command('i.zero2null', map=rast, quiet=True)
        grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))

    # space time dataset
    grass.message(_("Creating STRDS of Sentinel scenes ..."))
    if options['strds_output']:
        strds = options['strds_output']
        grass.run_command(
            't.create', output=strds, title="Sentinel-2",
            desc="Sentinel-2", quiet=True)

        # create register file; scene names carry the timestamp as
        # <...>_YYYYMMDDTHHMMSS_<...> in their second '_' field
        registerfile = grass.tempfile()
        with open(registerfile, 'w') as fd:
            for imp_rast in list(set(maplist)):
                date_str1 = imp_rast.split('_')[1].split('T')[0]
                date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6], date_str1[6:])
                time_str = imp_rast.split('_')[1].split('T')[1]
                clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4], time_str[4:])
                fd.write("%s|%s %s\n" % (imp_rast, date_str2, clock_str2))
        grass.run_command('t.register', input=strds, file=registerfile, quiet=True)
        # remove registerfile
        grass.try_remove(registerfile)

        if flags['c']:
            stvdsclouds = strds + '_clouds'
            grass.run_command(
                't.create', output=stvdsclouds, title="Sentinel-2 clouds",
                desc="Sentinel-2 clouds", quiet=True, type='stvds')
            registerfileclouds = grass.tempfile()
            with open(registerfileclouds, 'w') as fdclouds:
                for imp_clouds in cloudlist:
                    date_str1 = imp_clouds.split('_')[1].split('T')[0]
                    date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6], date_str1[6:])
                    time_str = imp_clouds.split('_')[1].split('T')[1]
                    clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4], time_str[4:])
                    fdclouds.write("%s|%s %s\n" % (imp_clouds, date_str2, clock_str2))
            grass.run_command(
                't.register', type='vector', input=stvdsclouds,
                file=registerfileclouds, quiet=True)
            grass.message("<%s> is created" % (stvdsclouds))
            # remove registerfile
            grass.try_remove(registerfileclouds)

        # extract strds for each band; the pattern may contain one
        # parenthesized alternation, e.g. "B(02|03|04)_10m"
        bands = []
        pattern = options['pattern']
        if "(" in pattern:
            global beforebrackets, afterbrackets
            beforebrackets = re.findall(r"(.*?)\(", pattern)[0]
            inbrackets = re.findall(r"\((.*?)\)", pattern)[0]
            afterbrackets = re.findall(r"\)(.*)", pattern)[0]
            bands = ["%s%s%s" % (beforebrackets, x, afterbrackets)
                     for x in inbrackets.split('|')]
        else:
            bands = pattern.split('|')

        for band in bands:
            if flags['i'] and ('20' in band or '60' in band):
                # BUGFIX: str.replace returns a new string; the original
                # code discarded the result, leaving the band name unchanged
                band = band.replace('20', '10').replace('60', '10')
            grass.run_command(
                't.rast.extract', input=strds,
                where="name like '%" + band + "%'",
                output="%s_%s" % (strds, band), quiet=True)
            grass.message("<%s_%s> is created" % (strds, band))
def main():
    """Query the USGS TNM Access API for the requested product, download the
    matching tiles, extract/import them into GRASS, and patch them into one
    output layer (r.patch for rasters, v.patch + v.surf.rst for lidar).

    All inputs come from the module-level `options`/`flags` dictionaries set
    by the GRASS parser; results are maps written to the current mapset.
    Fatal errors are reported via gscript.fatal().
    """
    # Hard-coded parameters needed for USGS datasets.
    # Per dataset the 'dataset' tuples are (degree, meter, feet) resolutions,
    # selected later from the current location's units.
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': [
                '1 x 1 degree',
                '15 x 15 minute'
            ],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011': (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness',
                'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)},
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "lidar": {
            'product': 'Lidar Point Cloud (LPC)',
            'dataset': {
                'Lidar Point Cloud (LPC)': (1. / 3600 / 9, 3, 10)},
            'subset': {},
            'extent': [''],
            'format': 'LAS,LAZ',
            'extension': 'las,laz',
            'zip': True,
            'srs': '',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    # tuple so it can be passed to str.endswith() when filtering ZIP members
    product_extensions = tuple(nav_string['extension'].split(','))
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'
        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    has_pdal = gscript.find_program(pgm='v.in.pdal')
    if gui_product == 'lidar':
        gui_dataset = 'Lidar Point Cloud (LPC)'
        product_tag = nav_string['product']
        if not has_pdal:
            gscript.warning(_("Module v.in.pdal is missing,"
                              " any downloaded data will not be processed."))

    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']
    memory = options['memory']
    nprocs = options['nprocs']

    # -k keeps both the extracted files and the per-tile imported maps
    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not os.path.isdir(work_dir):
        gscript.fatal(_("Directory <{}> does not exist."
                        " Please create it.").format(work_dir))

    # Returns current units: pick the degree/meter/feet resolution entry
    # matching the current location's units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == 'lidar' and options['resolution']:
        product_resolution = float(options['resolution'])

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(_("The default resampling method for product {product} is {res}").format(product=gui_product,
                        res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    min_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['w'], gregion['s']),
                                      proj_out=wgs84,
                                      separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['e'], gregion['n']),
                                      proj_out=wgs84,
                                      separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_messge = _("Possibly, the query has timed out. Check network configuration and try again.")
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(_(
            "HTTP(S) error from USGS TNM API:"
            " {code}: {reason} ({instructions})").format(
                reason=error.reason, code=error.code,
                instructions=try_again_messge))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(_(
            "Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_messge))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == 'lidar' and options['title_filter']:
            return_JSON['items'] = [item for item in return_JSON['items'] if options['title_filter'] in item['title']]
            return_JSON['total'] = len(return_JSON['items'])
    except:
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    # NOTE: both are closures over the per-item loop variables below
    # (TNM_file_URL, TNM_file_size, local_zip_path, ...) and over the
    # accumulator lists initialized just before the loop.
    def down_list():
        # record one tile that must be downloaded
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        # record one tile that already exists locally (complete)
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    # bytes of allowed mismatch between reported and on-disk file size
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size - TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    # local file is complete: reuse it
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                # no local copy: schedule for download
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(_("TNM API ERROR or Zero tiles available for given input parameters."))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: simply continue with whatever is needed to be done in this case
    if cleanup_list:
        cleanup_msg = _("\n{0} existing incomplete file(s) detected and removed. Run module again.").format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # formats JSON size from bites into needed units for combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if 6 < len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        if len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(_("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            # redundant: the with-block above already closed the file
            local_file.close()
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(_("USGS download request has timed out. Network or formatting error."))
        # NOTE(review): StandardError exists only in Python 2; under
        # Python 3 this raises NameError — confirm target interpreter
        except StandardError:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    # our pre-stats for extraction are broken, collecting stats during
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, we extract
                                    if os.path.getmtime(extracted_tile) < os.path.getmtime(z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(_(
                    "Unable to locate or extract IMG file '(unknown)'"
                    " from ZIP archive '{zipname}': {error}").format(
                        filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(_("Extracted {extracted} new tiles and"
                          " used {used} existing tiles").format(
            used=used_existing_extracted_tiles_num,
            extracted=extracted_tiles_num
        ))
        if old_extracted_tiles_num:
            gscript.verbose(_("Found {removed} existing tiles older"
                              " than the corresponding downloaded archive").format(
                removed=old_extracted_tiles_num
            ))
        if removed_extracted_tiles_num:
            gscript.verbose(_("Removed {removed} existing tiles").format(
                removed=removed_extracted_tiles_num
            ))

    if gui_product == 'lidar' and not has_pdal:
        gscript.fatal(_("Module v.in.pdal is missing,"
                        " cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is naming of the maps (it is a smaller
    # for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    # import tiles in batches of at most `nprocs` worker processes;
    # each worker reports into the shared `results` dict by identifier
    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists("raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _("Importing and reprojecting {name}"
                            " ({count} out of {total})...").format(
                    name=LT_file_name, count=i + 1, total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != 'lidar':
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_file_import,
                        kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            resolution='value',
                            resolution_value=product_resolution,
                            extent="region",
                            resample=product_interpolation,
                            memory=memory
                        ))
                else:
                    srs = options['input_srs']
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_lidar_import,
                        kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            input_srs=srs if srs else None
                        ))
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(_("Parallel import and reprojection failed."
                                        " Try running with nprocs=1."))
                    else:
                        gscript.fatal(_("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count

    gscript.verbose(_("Imported {imported} new tiles and"
                      " used {used} existing tiles").format(
        used=used_existing_imported_tiles_num,
        imported=imported_tiles_num
    ))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
    rst_params = dict(tension=25, smooth=0.1, npmin=100)

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                if gui_product == 'naip':
                    # NAIP tiles are imported as 4 separate bands (.1-.4)
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [name + '.' + i for name in patch_names]
                        output = gui_output_layer + '.' + i
                        gscript.run_command('r.patch', input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == 'lidar':
                    gscript.run_command('v.patch', flags='nzb', input=patch_names,
                                        output=gui_output_layer)
                    gscript.run_command('v.surf.rst', input=gui_output_layer,
                                        elevation=gui_output_layer, nprocs=nprocs,
                                        **rst_params)
                else:
                    gscript.run_command('r.patch', input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added").format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if not -k flag
                if not preserve_imported_tiles:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [name + '.' + i for name in patch_names]
                            gscript.run_command('g.remove', type='raster',
                                                name=patch_names_i, flags='f')
                    elif gui_product == 'lidar':
                        gscript.run_command('g.remove', type='vector',
                                            name=patch_names + [gui_output_layer], flags='f')
                    else:
                        gscript.run_command('g.remove', type='raster',
                                            name=patch_names, flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            # single tile: rename (or interpolate for lidar) instead of patching
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename',
                                        raster=(patch_names[0] + '.' + i,
                                                gui_output_layer + '.' + i))
            elif gui_product == 'lidar':
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                gscript.run_command('v.surf.rst', input=patch_names[0],
                                    elevation=gui_output_layer, nprocs=nprocs,
                                    **rst_params)
                if not preserve_imported_tiles:
                    gscript.run_command('g.remove', type='vector',
                                        name=patch_names[0], flags='f')
            else:
                gscript.run_command('g.rename', raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(_("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(_(
            "Error in getting or importing the data (see above). Please retry."))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = ("<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors', map=gui_output_layer, color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite', red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2',
                            blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
def main():
    """Entry point of g.cloud: package the current GRASS location, ship it
    plus the user's GRASS/qsub scripts to a remote cluster over ssh, and
    submit the jobs with qsub.

    Reads module `options`/`flags` (config, server, path, reconnect,
    grass_script, qsub_script, raster, vector, variables, mail; flags a/c/k).
    Returns 0 on the early-exit paths; otherwise ends after scheduling the
    collect job and closing the ssh session.

    NOTE(review): this function contains a corrupted span (see below) and
    Python 2 constructs (`raw_input`) — it cannot run as-is.
    """
    # set the home path
    home = os.path.expanduser('~')
    # check if user is in GRASS
    gisbase = os.getenv('GISBASE')
    if not gisbase:
        grass.fatal(_('$GISBASE not defined'))
        return 0  # unreachable in practice: grass.fatal exits
    # check ssh
    if not grass.find_program('ssh', '-V'):
        grass.fatal(
            _("%s required. Please install '%s' first.") % ('ssh', 'ssh'))
        return 0
    # parse the grassdata, location e mapset
    variables = grass.core.gisenv()
    # check the version
    version = grass.core.version()
    # this is would be set automatically
    if version['version'].find('7.') != -1:
        grassVersion = 'grass%s%s' % (version['version'][0],
                                      version['version'][2])
        session_path = '.grass%s' % version['version'][0]
    else:
        grass.fatal(_('You are not in a GRASS GIS version 7 session'))
        return 0
    # set the path of grassdata/location/mapset
    # set to .grass7 folder
    path = os.path.join(home, session_path, 'g.cloud')
    if not os.path.exists(path):
        os.makedirs(path)
    # set username, password and folder if settings are inserted by stdin
    if options['config'] == '-':
        # NOTE(review): the next line is corrupted in the source (a
        # credential-prompt block appears to have been scrubbed, replaced by
        # '******'); tokens kept verbatim — restore from upstream g.cloud
        # before running. The original presumably read the username/password
        # from stdin here, with an else-branch checking the config file path.
        user = raw_input(_('Insert username: '******'config']):
        grass.fatal(_('The file %s doesn\'t exist' % options['config']))
    # only accept the config file when it is private to the user (mode 600)
    if stat.S_IMODE(os.stat(options['config']).st_mode) == int('0600', 8):
        filesett = open(options['config'], 'r')
        fileread = filesett.readlines()
        # first line: username, second line: password
        user = fileread[0].strip()
        passwd = fileread[1].strip()
        filesett.close()
    else:
        err = 'The file permissions of %s are considered insecure.\n' % options[
            'config']
        # NOTE(review): this overwrites the previous message instead of
        # appending — almost certainly meant to be `err +=`
        err = 'Please correct permissions to read/write only for user (mode 600)'
        grass.fatal(_(err))
        return 0
    # server option
    server = options['server']
    # lazy import
    import cloud_ssh as sshs
    # create the sshs session
    ssh_conn = sshs.ssh_session(user, server, session_path, passwd)
    if flags['a']:
        ssh_conn.add()
    # check if the server has grass and qsub installed, and return the home
    if options['path'] == '$HOME':
        serverHome = serverCheck(ssh_conn, grassVersion)
    else:
        serverHome = options['path']
    if options['reconnect']:
        # re-attach to a previously launched job set and fetch results
        reconnect(ssh_conn, options['reconnect'], path, variables,
                  serverHome)
    else:
        if options['grass_script']:
            if not os.path.exists(options['grass_script']):
                grass.fatal(
                    _("File %s does not exists" % options['grass_script']))
            else:
                grassScript = options['grass_script']
                nameGrassScript = os.path.split(grassScript)[-1]
        else:
            grass.fatal(_("You have to set %s option") % 'grass_script')
        if options['qsub_script']:
            if not os.path.exists(options['qsub_script']):
                grass.fatal(
                    _("File %s does not exists" % options['qsub_script']))
            else:
                qsubScript = options['qsub_script']
                nameQsubScript = os.path.split(qsubScript)[-1]
        else:
            grass.fatal(_("You have to set %s option") % 'qsub_script')
        # the pid of process to have unique value
        pid = os.path.split(tempfile.mkstemp()[1])[-1]
        # name for the unique folder
        serverFolder = os.path.join(serverHome, 'gcloud%s' % pid)
        ssh_conn.ssh('mkdir %s' % serverFolder)
        serverGISDBASE = os.path.split(variables['GISDBASE'])[-1] + str(pid)
        permanent = os.path.join(variables['GISDBASE'],
                                 variables['LOCATION_NAME'], 'PERMANENT')
        # create the new path for $home/GRASSDATA/LOCATION/PERMANENT on the server
        new_perm = os.path.join(serverHome, serverGISDBASE,
                                variables['LOCATION_NAME'], 'PERMANENT')
        ssh_conn.ssh('mkdir -p %s' % new_perm)
        # ship only the projection/region metadata of PERMANENT, not the maps
        tar = tarfile.open("PERMANENT.tar.gz", "w:gz")
        tar.add(os.path.join(permanent, 'PROJ_INFO'), 'PROJ_INFO')
        tar.add(os.path.join(permanent, 'PROJ_UNITS'), 'PROJ_UNITS')
        tar.add(os.path.join(permanent, 'PROJ_EPSG'), 'PROJ_EPSG')
        tar.add(os.path.join(permanent, 'DEFAULT_WIND'), 'DEFAULT_WIND')
        tar.add(os.path.join(permanent, 'WIND'), 'WIND')
        if os.path.isfile(os.path.join(permanent, 'VAR')):
            tar.add(os.path.join(permanent, 'VAR'), 'VAR')
        tar.close()
        ssh_conn.scp('PERMANENT.tar.gz', serverHome)
        ssh_conn.ssh('tar -C %s -xzf PERMANENT.tar.gz' % new_perm)
        ssh_conn.ssh('rm -f PERMANENT.tar.gz')
        os.remove('PERMANENT.tar.gz')
        # copy requested raster/vector maps to the remote folder
        if options['raster'] != '':
            rasters = options['raster'].split(',')
            copyMaps(ssh_conn, rasters, 'raster', serverFolder)
        if options['vector'] != '':
            # NOTE(review): variable is named `rasters` but holds vectors
            rasters = options['vector'].split(',')
            copyMaps(ssh_conn, rasters, 'vector', serverFolder)
        # copy the scripts to the server
        tar = tarfile.open("script_gcloud.tar.gz", "w:gz")
        # assumed grouping (original indentation lost in this file):
        # unpack helpers only needed when maps were copied; collect/mail
        # scripts always shipped
        if options['raster'] != '' or options['vector'] != '':
            tar.add(os.path.join(cloudpath, 'cloud_unpack.py'),
                    'cloud_unpack.py')
            tar.add(os.path.join(cloudpath, 'cloud_which.py'),
                    'cloud_which.py')
        tar.add(os.path.join(cloudpath, 'cloud_collect.sh'),
                'cloud_collect.sh')
        tar.add(os.path.join(cloudpath, 'cloud_mail.sh'), 'cloud_mail.sh')
        tar.add(grassScript, nameGrassScript)
        tar.add(qsubScript, nameQsubScript)
        tar.close()
        ssh_conn.scp("script_gcloud.tar.gz", serverHome)
        ssh_conn.ssh('tar -C %s -xzf script_gcloud.tar.gz' % serverFolder)
        ssh_conn.ssh('rm -f script_gcloud.tar.gz')
        os.remove('script_gcloud.tar.gz')
        if options['raster'] != '' or options['vector'] != '':
            grass.debug(
                "Launching cloud_unpack.py with this parameters: %s, %s, %s"
                % (serverFolder, python, new_perm), debug=2)
            ssh_conn.ssh('"cd %s ; %s cloud_unpack.py %s"' %
                         (serverFolder, python, new_perm))
        # file collecting the qsub job ids, used later for -hold_jid
        qsubid = os.path.join(serverFolder, 'tmpqsub')
        grass.debug("The pid of job is %s" % (str(pid)), debug=2)
        if options['variables'] != '':
            # one qsub job per combination of the user-supplied variables
            vari = ast.literal_eval(options['variables'])
            values = vari.values()
            keys = vari.keys()
            if flags['c']:
                values = variablesCheckCicle(values)
            else:
                values = variablesCheck(values)
            njobs = 0
            for val in range(len(values)):
                launchstr = '"cd %s ; qsub -v MYPID=%s -v MYLD_LIBRARY_PATH=$LD_LIBRARY_PATH ' \
                    '-v GRASSDBASE=%s -v MYLOC=%s -v GRASSCRIPT=%s' % (
                        serverFolder, pid,
                        os.path.join(serverHome, serverGISDBASE),
                        variables['LOCATION_NAME'],
                        os.path.join(serverFolder, nameGrassScript)
                    )
                for k in range(len(keys)):
                    launchstr += ' -v %s=%s' % (str(
                        keys[k]), str(values[val][k]))
                launchstr += ' %s >> %s' % (os.path.join(
                    serverFolder, nameQsubScript), qsubid)
                ssh_conn.ssh(launchstr)
                njobs += 1
            grass.message(_('Launching %i jobs...' % njobs))
        else:
            launchstr = 'cd %s ; qsub -v MYPID=%s -v MYLD_LIBRARY_PATH=$LD_LIBRARY_PATH ' \
                '-v GRASSDBASE=%s -v MYLOC=%s -v GRASSCRIPT=%s %s > %s' % (
                    serverFolder, pid,
                    os.path.join(serverHome, serverGISDBASE),
                    variables['LOCATION_NAME'],
                    os.path.join(serverFolder, nameGrassScript),
                    os.path.join(serverFolder, nameQsubScript), qsubid
                )
            ssh_conn.ssh(launchstr)
            grass.message(_('Launching a single job...'))
        # if options['mail']:
        #     strin = "\"cd %s ; qsub -v MYPID=%s -v MYLD_LIBRARY_PATH=$LD_LIBRARY_PATH -hold_jid "
        #     strin += "-hold_jid `cat %s | tr '\n' ',' | sed 's+,$++g'` %s %s %s %s\""
        #     ssh_conn.ssh(strin % ( serverFolder, pid, qsubid,
        #         os.path.join(serverFolder, 'cloud_collect.sh'),
        #         os.path.join(serverHome, serverGISDBASE),
        #         variables['LOCATION_NAME'], options['mail'])
        #     )
        # else:
        #     strin = "\"cd %s ; qsub -v MYPID=%s -v MYLD_LIBRARY_PATH=$LD_LIBRARY_PATH -hold_jid "
        #     strin += "-hold_jid `cat %s | tr '\n' ',' | sed 's+,$++g'` %s %s %s\""
        #     ssh_conn.ssh(strin % ( serverFolder, pid, qsubid,
        #         os.path.join(serverFolder, 'cloud_collect.sh'),
        #         os.path.join(serverHome, serverGISDBASE),
        #         variables['LOCATION_NAME'])
        #     )
        # sentinel strings understood by cloud_collect.sh
        if options['mail']:
            mail = options['mail']
        else:
            mail = "NOOO"
        if flags['k']:
            remove = "NOOO"
        else:
            remove = "yes"
        # collect the job ids submitted above as a comma-separated list
        ids = ssh_conn.ssh(
            "cat %s | cut -d' ' -f3 | tr '\n' ',' | sed 's+,$++g'" % qsubid)
        # 'string %(s)s' % {'s':1}
        # submit the collect job, held until all computation jobs finish
        collectstr = "\"cd %s ; qsub -v MYPID=%s -v MYLD_LIBRARY_PATH=$LD_LIBRARY_PATH " % (
            serverFolder, pid)
        collectstr += "-hold_jid %s %s %s %s %s %s %s\"" % (
            ids, os.path.join(serverFolder, 'cloud_collect.sh'),
            os.path.join(serverHome, serverGISDBASE),
            variables['LOCATION_NAME'], mail, remove, pid)
        ssh_conn.ssh(collectstr)
        grass.message(
            _('If you want to reconnect to this job to see its status please use the reconnect options with this value: %s'
              % pid))
        grass.message(
            _(' g.cloud config=path|- server=host reconnect=%s' % pid))
        ssh_conn.close()
def scan_extent(infile):
    """Determine the 3D extent of a point-cloud file via pdal.

    Runs ``pdal info --summary`` on *infile*; if that fails (driver does
    not support the summary mode), falls back to plain ``pdal info``.
    The JSON written by either call is then parsed for the bounds, with a
    second key layout tried for older pdal releases.

    Args:
        infile(string): name of the point-cloud input file

    Returns:
        tuple of str: (n, s, w, e, t, b) boundary coordinate values
    """
    if not grass.find_program(
            options['pdal_cmd'].split(' ')[0],
            ' '.join(options['pdal_cmd'].split(' ')[1:]) + ' info --summary'):
        grass.fatal(
            _("The pdal program is not in the path " +
              "and executable. Please install first"))
    command_scan = options['pdal_cmd'].split(' ')
    command_scan.extend(['info', '--summary', infile])
    tmp_scan = grass.tempfile()
    if tmp_scan is None:
        grass.fatal("Unable to create temporary files")
    summary = True
    # fix: use context managers so the handle is closed on every path
    # (the original leaked handles and had dead fh2.close() after fatal)
    with open(tmp_scan, 'wb') as fh:
        summary_failed = grass.call(command_scan, stdout=fh) != 0
    if summary_failed:
        # retry without --summary for formats whose driver lacks it
        command_scan = options['pdal_cmd'].split(' ')
        command_scan.extend(['info', infile])
        with open(tmp_scan, 'wb') as fh2:
            info_failed = grass.call(command_scan, stdout=fh2) != 0
        if info_failed:
            os.remove(tmp_scan)
            grass.fatal(
                _("pdal cannot determine metadata " +
                  "for unsupported format of <%s>") % infile)
        summary = False
    # fix: close the JSON handle instead of leaking it
    # (was: data = json.load(open(tmp_scan)))
    with open(tmp_scan) as json_fh:
        data = json.load(json_fh)
    if summary:
        str1 = u'summary'
        str2 = u'bounds'
        y_str = u'Y'
        x_str = u'X'
        z_str = u'Z'
        min_str = u'min'
        max_str = u'max'
        try:
            n = str(data[str1][str2][y_str][max_str])
            s = str(data[str1][str2][y_str][min_str])
            w = str(data[str1][str2][x_str][min_str])
            e = str(data[str1][str2][x_str][max_str])
            t = str(data[str1][str2][z_str][max_str])
            b = str(data[str1][str2][z_str][min_str])
        except KeyError:
            # fix: narrowed from a bare 'except:' (which also swallowed
            # KeyboardInterrupt/SystemExit); presumably older pdal output
            # uses flat min*/max* keys instead of nested per-axis dicts
            ymin_str = u'miny'
            xmin_str = u'minx'
            zmin_str = u'minz'
            ymax_str = u'maxy'
            xmax_str = u'maxx'
            zmax_str = u'maxz'
            n = str(data[str1][str2][ymax_str])
            s = str(data[str1][str2][ymin_str])
            w = str(data[str1][str2][xmin_str])
            e = str(data[str1][str2][xmax_str])
            t = str(data[str1][str2][zmax_str])
            b = str(data[str1][str2][zmin_str])
    else:
        # plain 'pdal info' layout: stats -> bbox -> native -> bbox
        str1 = u'stats'
        str2 = u'bbox'
        str3 = u'native'
        str4 = u'bbox'
        n = str(data[str1][str2][str3][str4][u'maxy'])
        s = str(data[str1][str2][str3][str4][u'miny'])
        w = str(data[str1][str2][str3][str4][u'minx'])
        e = str(data[str1][str2][str3][str4][u'maxx'])
        t = str(data[str1][str2][str3][str4][u'maxz'])
        b = str(data[str1][str2][str3][str4][u'minz'])
    return n, s, w, e, t, b
def main():
    """Entry point of m.printws: render every display of a saved GRASS
    workspace file to a print-ready raster/PDF via the cairo driver and
    ImageMagick.

    Reads module `options` (input workspace, dpi, page, fonts, titles,
    output, format, ...) and `flags` ('d' keeps temp files). Returns 0.

    NOTE(review): contains the Python-2-only `<>` operator below; this
    function cannot run unmodified on Python 3.
    """
    # Following declarations MAY will used in future for sure.
    global GISDBASE, LAYERCOUNT, LASTFILE
    # Check if ImageMagick is available since it is essential
    if os.name == 'nt':
        # on Windows the binary is 'magick'
        if grass.find_program('magick', '-version'):
            grass.verbose(_('printws: ImageMagick is available: OK!'))
        else:
            grass.fatal(
                'ImageMagick is not accessible. See documentation of m.printws module for details.'
            )
    else:
        if grass.find_program('convert', '-version'):
            grass.verbose(_('printws: ImageMagick is available: OK!'))
        else:
            grass.fatal(
                'ImageMagick is not accessible. See documentation of m.printws module for details.'
            )
    textmacros = {}
    # %nam% macros are kept for backward compatibility
    textmacros['%TIME24%'] = time.strftime("%H:%M:%S")
    textmacros['%DATEYMD%'] = time.strftime("%Y.%m.%d")
    textmacros['%DATEMDY%'] = time.strftime("%m/%d/%Y")
    if not hasPwd:
        textmacros['%USERNAME%'] = '(user unknown)'
    else:
        textmacros['%USERNAME%'] = pwd.getpwuid(os.getuid())[0]
    # using $ for macros in the future. New items should be created
    # exclusively as $macros later on
    textmacros['\$TIME24'] = textmacros['%TIME24%']
    textmacros['\$DATEYMD'] = textmacros['%DATEYMD%']
    textmacros['\$DATEMDY'] = textmacros['%DATEMDY%']
    textmacros['\$USERNAME'] = textmacros['%USERNAME%']
    # non-breaking space macro
    textmacros[
        '\$SPC'] = u'\u00A0'  #?? d.text won't display this at string end hmmm
    # saves region for restoring at end
    # doing with official method:
    grass.use_temp_region()
    # getting/setting screen/print dpi ratio
    if len(options['dpi']) > 0:
        dpioption = float(options['dpi'])
    else:
        dpioption = 150.0
    if len(options['screendpi']) > 0:
        screendpioption = float(options['screendpi'])
    else:
        screendpioption = 100.0
    global UPSIZE
    UPSIZE = float(dpioption) / float(screendpioption)
    if len(options['input']) > 0:
        displays = readworkspace(options['input'])
    else:
        quit()
    textmacros['%GXW%'] = options['input']
    textmacros['\$GXW'] = textmacros['%GXW%']
    displaycounter = 0
    # there could be multiple displays in a workspace so we loop them
    # each display is a whole and independent file assembly
    for key in displays:
        textmacros['%DISPLAY%'] = key
        textmacros['\$DISPLAY'] = key
        grass.verbose(_('printws: rendering display: ' + key))
        displaycounter = displaycounter + 1
        layers = copy.deepcopy(displays[key])
        # extracting extent information from layers dic and erase the item
        # extents[0-5] w s e n minz maxz ; extents [6-9] window x y w h
        extents = layers[0]
        grass.verbose("m.printws: EXTENTS from workspace:" +
                      str(extents))  # was debug message
        del layers[0]
        regionmode = ''
        if len(options['region']) > 0:
            grass.run_command("g.region", region=options['region'])
            regionmode = 'region'
        else:
            grass.run_command("g.region", "", w=extents[0], s=extents[1],
                              e=extents[2], n=extents[3])
            regionmode = 'window'
        # setting GRASS rendering environment
        # dummy file name is defined since the following lines
        # when switching on the cairo driver would create
        # an empty map.png in the current directory
        os.environ['GRASS_RENDER_FILE'] = os.path.join(
            TMPDIR,
            str(os.getpid()) + '_DIS_' + str(00) + '_GEN_' + str(00) + '.png')
        os.environ['GRASS_RENDER_IMMEDIATE'] = 'cairo'
        os.environ['GRASS_RENDER_FILE_READ'] = 'TRUE'
        os.environ['GRASS_RENDER_TRANSPARENT'] = 'TRUE'
        os.environ['GRASS_RENDER_FILE_COMPRESSION'] = '0'
        os.environ['GRASS_RENDER_FILE_MAPPED'] = 'TRUE'
        # reading further options and setting defaults
        if len(options['page']) > 0:
            pageoption = options['page']
        else:
            pageoption = 'A4landscape'
        # parsing titles, etc.
        if len(options['font']) > 0:
            isAsterisk = options['font'].find('*')
            if isAsterisk > 0:
                titlefont = getfontbypattern(options['font'].replace('*', ''))
            else:
                titlefont = options['font']
        else:
            titlefont = getfontbypattern('Open')  # try to find something UTF-8
        grass.verbose(_("printws: titlefont: " + titlefont))
        if len(options['titlecolor']) > 0:
            titlecolor = options['titlecolor']
        else:
            titlecolor = black
        if len(options['maintitlesize']) > 0:
            maintitlesize = converttommfrom(float(options['maintitlesize']),
                                            options['layunits'])
        else:
            maintitlesize = 10.0
        if len(options['subtitlesize']) > 0:
            subtitlesize = converttommfrom(float(options['subtitlesize']),
                                           options['layunits'])
        else:
            subtitlesize = 7.0
        if len(options['pssize']) > 0:
            pssize = converttommfrom(float(options['pssize']),
                                     options['layunits'])
        else:
            pssize = 5.0
        # Please fasten your seatbelts :) Calculations start here.
        # -------------------------------------------------------------------
        pagesizes = getpagesizes(pageoption)
        pagesizesindots = dictodots(pagesizes, dpioption)
        # Leave space for titles up and ps down - still in mm !!
        upperspace = 0
        subtitletop = 0
        titletop = 0
        if len(options['maintitle']) > 0:
            titletop = 0.4 * maintitlesize
            upperspace = upperspace + titletop + maintitlesize
        if len(options['subtitle']) > 0:
            subtitletop = upperspace + 0.4 * subtitlesize
            upperspace = subtitletop + subtitlesize + 1
        lowerspace = 0
        if (len(options['psundercentral']) > 0) or (len(
                options['psunderright']) > 0) or (len(
                    options['psunderleft']) > 0):
            lowerspace = lowerspace + pssize + 2
        os.environ['GRASS_RENDER_WIDTH'] = str(pagesizesindots['w'])
        os.environ['GRASS_RENDER_HEIGHT'] = str(pagesizesindots['h'])
        pagemargins = getpagemargins(options['pagemargin'],
                                     options['layunits'])
        pagemarginsindots = dictodots(pagemargins, dpioption)
        # Getting max drawing area in dots
        mxfd = getmaxframeindots(pagemarginsindots, pagesizesindots)
        maxframe = str(mxfd['t']) + ',' + str(mxfd['b']) + \
            ',' + str(mxfd['l']) + ',' + str(mxfd['r'])
        # convert font size in mm to percentage for d.text
        mxfmm = dictomm(mxfd, dpioption)
        maintitlesize = float(maintitlesize) / (mxfmm['b'] - mxfmm['t']) * 100.0
        subtitlesize = float(subtitlesize) / (mxfmm['b'] - mxfmm['t']) * 100.0
        pssize = float(pssize) / (mxfmm['r'] - mxfmm['l']) * 100.0
        # subtitle location is another issue
        subtitletoppercent = 100.0 - subtitletop / \
            (mxfmm['b'] - mxfmm['t']) * 100.0
        titletoppercent = 100.0 - titletop / \
            (mxfmm['b'] - mxfmm['t']) * 100.0
        mapul = getmapUL(options['mapupperleft'], options['layunits'])
        mapulindots = dictodots(mapul, dpioption)
        mapsizes = getmapsizes(options['mapsize'], options['layunits'])
        mapsizesindots = dictodots(mapsizes, dpioption)
        # Correcting map area ratio to ratio of region edges
        # OR screen window edges depeding on "regionmode"
        # for later: grass.use_temp_region()
        ISLATLONG = False
        s = grass.read_command("g.region", flags='p')
        kv = grass.parse_key_val(s, sep=':')
        regioncols = float(kv['cols'].strip())
        regionrows = float(kv['rows'].strip())
        ewrestemp = kv['ewres'].strip()
        nsrestemp = kv['nsres'].strip()
        if ewrestemp.find(':') > 0:
            # resolution shown as D:M:S -> convert to decimal degrees
            ISLATLONG = True
            ewrestemp = ewrestemp.split(':')
            ewres = float(ewrestemp[0]) + float(ewrestemp[1]) / 60.0 + float(
                ewrestemp[2]) / 3600.0
            nsrestemp = nsrestemp.split(':')
            nsres = float(nsrestemp[0]) + float(nsrestemp[1]) / 60.0 + float(
                nsrestemp[2]) / 3600.0
        else:
            ewres = float(ewrestemp)
            nsres = float(nsrestemp)
        sizex = regioncols * ewres
        sizey = regionrows * nsres
        grass.verbose(_("printws: sizex " + str(sizex)))
        grass.verbose(_("printws: sizey " + str(sizey)))
        if regionmode == 'region':
            hregionratio = float(sizex) / float(sizey)
            grass.verbose(_("printws: REGION MODE -> region "))
        else:
            # surprisingly doing the SAME
            # using screen window ratio for map area
            # next line was a test for this but didn't help on gadgets positioning
            #hregionratio = float(extents[8]) / float(extents[9])
            hregionratio = float(sizex) / float(sizey)
            grass.verbose(_("printws: REGION MODE -> window"))
        hmapratio = mapsizes['w'] / mapsizes['h']
        grass.verbose(_("printws: raw mapsizes: " + str(mapsizesindots)))
        grass.verbose(_("printws: hr: " + str(hregionratio)))
        grass.verbose(_("printws: hm: " + str(hmapratio)))
        # shrink one map edge so the paper frame has the region's aspect
        if hregionratio > hmapratio:
            grass.verbose(
                _("printws: Map area height correction / " +
                  str(hregionratio)))
            mapsizes['h'] = mapsizes['w'] / hregionratio
        elif hregionratio < hmapratio:
            grass.verbose(
                _("printws: Map area width correction * " +
                  str(hregionratio)))
            mapsizes['w'] = mapsizes['h'] * hregionratio
        mapsizesindots = dictodots(mapsizes, dpioption)
        # changing region resolution to match print resolution
        # to eliminate unnecessary CPU heating/data transfer
        # so as to make it faster
        # with only invisible detail loss.
        colsregiontomap = float(mapsizesindots['w']) / regioncols
        rowsregiontomap = float(mapsizesindots['h']) / regionrows
        newewres = ewres
        newnsres = nsres
        # if colsregiontomap < 1:
        # CHANGE: also enables raising of resolution to prevent
        # pixelation because of low resolution setting...
        newewres = ewres / colsregiontomap
        # if rowsregiontomap < 1:
        newnsres = nsres / rowsregiontomap
        # WOW - no necessary to convert back to DMS for nsres / ewres
        #if ISLATLONG:
        #    newewresstr=decdeg2dms(newewres)
        #    newnsresstr=decdeg2dms(newnsres)
        #else:
        newewresstr = str(newewres)
        newnsresstr = str(newnsres)
        grass.run_command("g.region", ewres=newewresstr, nsres=newnsresstr)
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # it seems that d.wms uses the GRASS_REGION from region info
        # others may also do so we set it
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        kv2 = {}
        kv2['e'] = kv['east']
        kv2['n'] = kv['north']
        kv2['s'] = kv['south']
        kv2['w'] = kv['west']
        kv2['ewres'] = newewresstr
        kv2['nsres'] = newnsresstr
        #kv2['rows'] #- autocalculated to resolution - no need to set explicitly
        #kv2['cols'] #- autocalculated to resolution - no need to set explicitly
        #grass.message(str(kv2))
        #grass.message(grass.region_env(**kv2))
        #grass.message(s)
        os.environ['GRASS_REGION'] = grass.region_env(**kv2)
        # Getting mapping area in dots
        # Correcting mxfd to leave space for title and subscript
        pagemarginstitles = copy.deepcopy(pagemargins)
        pagemarginstitles['t'] = pagemarginstitles['t'] + upperspace
        pagemarginstitles['b'] = pagemarginstitles['b'] + lowerspace
        pagemarginsindotstitles = dictodots(pagemarginstitles, dpioption)
        mxfdtitles = getmaxframeindots(pagemarginsindotstitles,
                                       pagesizesindots)
        mpfd = getmapframeindots(mapulindots, mapsizesindots, mxfdtitles)
        if pageoption == 'Flexi':
            # For 'Flexi' page we modify the setup to create
            # a page containing only the map without margins
            grass.verbose(_("printws: pre Flexi mapframe: " + str(mpfd)))
            mpfd['b'] = mpfd['b'] - mpfd['t']
            mpfd['t'] = 0
            mpfd['r'] = mpfd['r'] - mpfd['l']
            mpfd['l'] = 0
            os.environ['GRASS_RENDER_WIDTH'] = str(mpfd['r'])
            os.environ['GRASS_RENDER_HEIGHT'] = str(mpfd['b'])
            grass.verbose(_("printws: post Flexi mapframe: " + str(mpfd)))
        mapframe = str(mpfd['t']) + ',' + str(mpfd['b']) + \
            ',' + str(mpfd['l']) + ',' + str(mpfd['r'])
        grass.verbose(_("printws: DOT VALUES ARE:"))
        grass.verbose(_("printws: maxframe: " + str(mxfd)))
        grass.verbose(_("printws: maxframe: " + maxframe))
        grass.verbose(_("printws: mapframe: " + str(mpfd)))
        grass.verbose(_("printws: mapframe: " + mapframe))
        grass.verbose(_("printws: page: " + str(pagesizesindots)))
        grass.verbose(_("printws: margins: " + str(pagemarginsindots)))
        grass.verbose(_("printws: mapUL: " + str(mapulindots)))
        grass.verbose(
            _("printws: mapsizes (corrected): " + str(mapsizesindots)))
        grass.verbose(_("printws: ewres (corrected): " + str(newewres)))
        grass.verbose(_("printws: nsres (corrected): " + str(newnsres)))
        # quit()
        # ------------------- INMAP -------------------
        # Do not limit -map. It was: -limit map 720000000 before...
        # So we can grow on disk as long as it lasts
        imcommand = 'convert -limit memory 720000000 -units PixelsPerInch -density ' + \
            str(int(dpioption)) + ' '
        if os.name == 'nt':
            imcommand = 'magick ' + imcommand
        os.environ['GRASS_RENDER_FRAME'] = mapframe
        grass.verbose(_("printws: Rendering: the following layers: "))
        lastopacity = '-1'
        # render each layer; consecutive fully-opaque layers share one PNG,
        # semi-transparent layers each get their own file plus an
        # ImageMagick alpha-multiply term
        for lay in layers:
            grass.verbose(_(lay[1] + ' at: ' + lay[0] + ' opacity'))
            if lay[0] == '1':
                # NOTE(review): '<>' is Python 2 only syntax ('!=' in py3)
                if lastopacity <> '1':
                    LASTFILE = os.path.join(TMPDIR, str(os.getpid()) +
                                            '_DIS_' + str(displaycounter) +
                                            '_GEN_' + str(LAYERCOUNT) +
                                            '.' + TMPFORMAT)
                    os.environ['GRASS_RENDER_FILE'] = LASTFILE
                    LAYERCOUNT = LAYERCOUNT + 2
                    imcommand = imcommand + ' ' + LASTFILE
                    lastopacity = '1'
                render(lay[1], lay[2], lay[3])
            else:
                lastopacity = lay[0]
                LASTFILE = os.path.join(
                    TMPDIR,
                    str(os.getpid()) + '_DIS_' + str(displaycounter) +
                    '_GEN_' + str(LAYERCOUNT) + '.' + TMPFORMAT)
                LAYERCOUNT = LAYERCOUNT + 2
                os.environ['GRASS_RENDER_FILE'] = LASTFILE
                grass.verbose("LAY: " + str(lay))
                render(lay[1], lay[2], lay[3])
                imcommand = imcommand + \
                    ' \( ' + LASTFILE + ' -channel a -evaluate multiply ' + \
                    lay[0] + ' +channel \)'
        # setting resolution back to pre-script state since map rendering is
        # finished
        # CHANGE: not necessary anymore since we use temp_region now
        # However, since we did set GRASS_REGION, let's redo it here
        os.environ.pop('GRASS_REGION')
        # ------------------- OUTSIDE MAP texts, etc -------------------
        if pageoption == 'Flexi':
            grass.verbose(
                _('m.printws: WARNING! Felxi mode, will not create titles, etc...'
                  ))
        else:
            os.environ['GRASS_RENDER_FRAME'] = maxframe
            # NOTE(review): 'dict' shadows the builtin
            dict = {}
            dict['task'] = "d.text"
            dict['color'] = titlecolor
            dict['font'] = titlefont
            dict['charset'] = "UTF-8"
            if len(options['maintitle']) > 1:
                dict['text'] = decodetextmacros(options['maintitle'],
                                                textmacros)
                dict['at'] = "50," + str(titletoppercent)
                dict['align'] = "uc"
                dict['size'] = str(maintitlesize)
                render(str(dict), dict, {})
            if len(options['subtitle']) > 1:
                dict['text'] = decodetextmacros(options['subtitle'],
                                                textmacros)
                dict['at'] = "50," + str(subtitletoppercent)
                dict['align'] = "uc"
                dict['size'] = str(subtitlesize)
                render(str(dict), dict, {})
            dict['size'] = str(pssize)
            if len(options['psundercentral']) > 1:
                dict['text'] = decodetextmacros(options['psundercentral'],
                                                textmacros)
                dict['at'] = "50,1"
                dict['align'] = "lc"
                render(str(dict), dict, {})
            if len(options['psunderleft']) > 1:
                dict['text'] = decodetextmacros(options['psunderleft'],
                                                textmacros)
                dict['at'] = "0,1"
                dict['align'] = "ll"
                render(str(dict), dict, {})
            if len(options['psunderright']) > 1:
                dict['text'] = decodetextmacros(options['psunderright'],
                                                textmacros)
                dict['at'] = "100,1"
                dict['align'] = "lr"
                render(str(dict), dict, {})
        # ------------------- GENERATING OUTPUT FILE -------------------
        if len(options['output']) > 1:
            output = options['output']
        else:
            output = 'map_' + str(os.getpid())
        # remove extension AND display number and naming if any
        output = os.path.splitext(output)[0]
        output = re.sub('_DISPLAY_[0-9]+_.*', '', output)
        if len(options['format']) > 1:
            extension = options['format']
        else:
            extension = 'pdf'
        displaypart = ''
        if len(displays) > 1:
            displaypart = '_DISPLAY_' + str(displaycounter) + '_' + key
        pagedata = getpagedata(pageoption)
        #params= ' -extent '+str(pagesizesindots['w'])+'x'+str(pagesizesindots['h'])+' -gravity center -compress jpeg -page '+pagedata['page']+' '+pagedata['parameters']+' -units PixelsPerInch -density '+str(dpioption)+'x'+str(dpioption)+' '
        params = ' -compress jpeg -quality 92 ' + \
            pagedata['parameters'] + ' -units PixelsPerInch -density ' + \
            str(int(dpioption)) + ' '
        imcommand = imcommand + ' -layers flatten ' + params + \
            '"' + output + displaypart + '.' + extension + '"'
        grass.verbose(
            _('printws: And the imagemagick command is... ' + imcommand))
        os.system(imcommand)
    # cleanup after all displays are assembled
    # (assumed to sit outside the display loop -- original indentation lost)
    if not flags['d']:
        grass.verbose(_('printws: Doing graceful cleanup...'))
        os.system('rm ' + os.path.join(TMPDIR, str(os.getpid()) + '*_GEN_*'))
        if REMOVE_TMPDIR:
            try_rmdir(TMPDIR)
        else:
            grass.message(
                "\n%s\n" %
                _("printws: Temp dir remove failed. Do it yourself, please:"))
            # NOTE(review): chained '%' here is suspicious -- the first
            # formatting consumes the placeholder, so the second likely raises
            sys.stderr.write('%s\n' % TMPDIR % ' <---- this')
    # restoring pre-script region
    # - not necessary as we are using grass.use_temp_region() in the future
    return 0
def __init__(self, giface, parent):
    """Build the Tangible Landscape dialog.

    Loads persisted settings (creating defaults on first run), constructs
    the notebook with Scanning/Analyses/Export/Drawing/Color panels, the
    control buttons and their event bindings.

    :param giface: GRASS GUI interface used for warnings/errors
    :param parent: parent wx window
    """
    wx.Dialog.__init__(self, parent, title="Tangible Landscape",
                       style=wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER)
    self.giface = giface
    self.parent = parent
    # r.in.kinect performs the actual scanning; warn early if missing
    if not gscript.find_program('r.in.kinect'):
        self.giface.WriteError("ERROR: Module r.in.kinect not found.")
    self.settings = {}
    UserSettings.ReadSettingsFile(settings=self.settings)
    # for the first time
    if not 'tangible' in self.settings:
        # default settings tree: calibration matrix, analyses config and
        # scanning parameters
        self.settings['tangible'] = {'calibration': {'matrix': None},
                                     'analyses': {'file': None,
                                                  'contours': None,
                                                  'contours_step': 1},
                                     'scan': {'scan_name': 'scan',
                                              'elevation': '',
                                              'region': '',
                                              'zexag': 1.,
                                              'smooth': 7,
                                              'numscans': 1,
                                              'rotation_angle': 180,
                                              'resolution': 2,
                                              'trim_nsewtb': '30,30,30,30,60,100',
                                              'interpolate': False,
                                              'trim_tolerance': 0.7,
                                              'equalize': False}
                                     }
    self.scan = self.settings['tangible']['scan']
    self.calib_matrix = self.settings['tangible']['calibration']['matrix']
    if not self.calib_matrix:
        giface.WriteWarning("WARNING: No calibration file exists")
    # polling delay (seconds) and runtime state
    self.delay = 0.3
    self.process = None
    self.observer = None
    self.timer = wx.Timer(self)
    # set True by panels whenever the user changes an input
    self.changedInput = False
    self.notebook = wx.Notebook(self)
    scanning_panel = ScanningPanel(self.notebook, self.giface,
                                   self.settings['tangible'])
    self.notebook.AddPage(scanning_panel, "Scanning")
    scanning_panel.settingsChanged.connect(
        lambda: setattr(self, 'changedInput', True))
    analyses_panel = AnalysesPanel(self.notebook, self.giface,
                                   self.settings['tangible'])
    self.notebook.AddPage(analyses_panel, "Analyses")
    self.exportPanel = ExportPanel(self.notebook, self.giface,
                                   self.settings['tangible'])
    self.notebook.AddPage(self.exportPanel, "Export")
    self.exportPanel.settingsChanged.connect(
        lambda: setattr(self, 'changedInput', True))
    self.drawing_panel = DrawingPanel(self.notebook, self.giface,
                                      self.settings['tangible'])
    self.notebook.AddPage(self.drawing_panel, "Drawing")
    self.drawing_panel.Bind(EVT_UPDATE_GUI, self.OnUpdate)
    self.drawing_panel.settingsChanged.connect(
        lambda: setattr(self, 'changedInput', True))
    self.color_panel = ColorInteractionPanel(self.notebook, self.giface,
                                             self.settings['tangible'],
                                             scaniface=self)
    self.notebook.AddPage(self.color_panel, "Color")
    # control buttons
    btnStart = wx.Button(self, label="Start")
    btnStop = wx.Button(self, label="Stop")
    btnPause = wx.Button(self, label="Pause")
    self.btnPause = btnPause
    btnScanOnce = wx.Button(self, label="Scan once")
    btnCalibrate = wx.Button(self, label="Calibrate")
    btnHelp = wx.Button(self, label="Help")
    btnClose = wx.Button(self, label="Close")
    self.status = wx.StaticText(self)
    # bind events
    btnStart.Bind(wx.EVT_BUTTON, lambda evt: self.Start())
    btnStop.Bind(wx.EVT_BUTTON, lambda evt: self.Stop())
    btnPause.Bind(wx.EVT_BUTTON, lambda evt: self.Pause())
    btnCalibrate.Bind(wx.EVT_BUTTON, self.Calibrate)
    btnScanOnce.Bind(wx.EVT_BUTTON, self.ScanOnce)
    btnHelp.Bind(wx.EVT_BUTTON, self.OnHelp)
    btnClose.Bind(wx.EVT_BUTTON, self.OnClose)
    self.Layout()
    # layout: button row, status line, notebook, help/close row
    sizer = wx.BoxSizer(wx.VERTICAL)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.Add(btnStart, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnStop, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnPause, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnCalibrate, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnScanOnce, flag=wx.EXPAND | wx.ALL, border=5)
    sizer.Add(hSizer, 0, wx.ALL | wx.EXPAND, 5)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.Add(self.status, flag=wx.EXPAND | wx.LEFT, border=5)
    sizer.Add(hSizer)
    sizer.Add(self.notebook, 1, wx.ALL | wx.EXPAND, 5)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.AddStretchSpacer()
    hSizer.Add(btnHelp, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnClose, flag=wx.EXPAND | wx.ALL, border=5)
    sizer.Add(hSizer, flag=wx.EXPAND)
    self.SetSizer(sizer)
    sizer.Fit(self)
    self.SetMinSize(self.GetBestSize())
    self.Layout()
    # timer and custom scanning events
    self.Bind(wx.EVT_TIMER, self.RestartIfNotRunning, self.timer)
    self.Bind(wx.EVT_CLOSE, self.OnClose)
    self.Bind(EVT_UPDATE_GUI, self.OnUpdate)
    self.Bind(EVT_ADD_LAYERS, self.OnAddLayers)
    self.Bind(EVT_REMOVE_LAYERS, self.OnRemoveLayers)
    self.Bind(EVT_CHECK_LAYERS, self.OnCheckLayers)
    self.pause = None
def main():
    """Compute the Topographic Ruggedness Index (TRI) of a DEM.

    Reads module options/flags, validates them, assembles an r.mapcalc
    expression over a (optionally circular) moving window with IDW
    weights, then evaluates it — tiled in parallel via r.mapcalc.tiled
    when possible, otherwise in a single r.mapcalc run — and records
    metadata on the result.

    :return: 0 on success
    """
    dem = options["input"]
    tri = options["output"]
    size = int(options["size"])
    exponent = float(options["exponent"])
    processes = int(options["processes"])
    circular = flags["c"]
    radius = int((size - 1) / 2)
    region = Region()

    # strip a mapset qualifier from the output name, if present
    if "@" in tri:
        tri = tri.split("@")[0]

    # ---- option validation (first failing check aborts) ----
    if processes == 0:
        gs.fatal(
            "Number of processing cores for parallel computation must not equal 0"
        )
    if processes < 0:
        # negative values count backwards from the machine's core count
        system_cores = mp.cpu_count()
        processes = system_cores + processes + 1
    if processes > 1:
        if gs.find_program("r.mapcalc.tiled") is False:
            gs.fatal(
                "The GRASS addon r.mapcalc.tiled must also be installed if n_jobs != 1. Run 'g.extension r.mapcalc.tiled'"
            )
    if size <= 2 or size > 51:
        gs.fatal("size must be > 2 and <= 51")
    if size % 2 != 1:
        gs.fatal("size must be an odd number")
    if exponent < 0 or exponent > 4.0:
        gs.fatal("exponent must be >= 0 and <= 4.0")

    # ---- build the map-algebra expression ----
    gs.message("Calculating the Topographic Ruggedness Index...")

    # neighbourhood offsets (centre cell excluded) and their IDW weights
    offsets = focal_expr(radius, circular)
    weights = idw_weights(radius, exponent, circular)
    terms = "+".join(
        "{w}*abs({dem}[{d}]-{dem})".format(
            dem=dem, d=",".join(map(str, offset)), w=weight)
        for offset, weight in zip(offsets, weights))

    expr = "{tri} = float({terms})".format(tri=tri, terms=terms)

    # ---- evaluate, tiled only when tiles are smaller than the region ----
    width, height = tile_shape(region, processes)
    if width < region.cols and height < region.rows and processes > 1:
        gr.mapcalc_tiled(
            expression=expr,
            width=width,
            height=height,
            processes=processes,
            overlap=int((size + 1) / 2),
            output=tri,
        )
    else:
        gs.mapcalc(expr)

    # ---- attach provenance metadata to the output raster ----
    opts = "-c" if circular else ""
    gr.support(
        map=tri,
        title="Terrain Ruggedness Index",
        description=
        "Generated with r.tri input={dem} output={tri} size={size} exponent={exponent} {flags}"
        .format(dem=dem, tri=tri, size=size, exponent=exponent, flags=opts),
    )
    return 0
def footprint_to_vectormap(infile, footprint):
    """ The function generates a footprint as vectormap of the input las-file.
    It uses pdal info --boundary.

    Args:
        infile(string): Name of LAS input file
        footprint(string): Footprint of the data as vector map
    """
    # Verify the configured pdal command is available before doing any work.
    if not grass.find_program(
            options['pdal_cmd'].split(' ')[0],
            ' '.join(options['pdal_cmd'].split(' ')[1:]) + ' info --boundary'):
        grass.fatal(
            _("The pdal executable is not available."
              " Install PDAL or put the pdal executable on path."))
    command_fp = options['pdal_cmd'].split(' ')
    command_fp.extend(['info', '--boundary', infile])
    tmp_fp = grass.tempfile()
    if tmp_fp is None:
        grass.fatal("Unable to create temporary files")
    # BUGFIX: use context managers so the handles are always closed — the
    # original left both the stdout handle and json.load(open(tmp_fp)) open.
    with open(tmp_fp, 'wb') as fh:
        pdal_failed = grass.call(command_fp, stdout=fh) != 0
    if pdal_failed:
        # check to see if pdal info executed properly
        os.remove(tmp_fp)
        grass.fatal(_("pdal info broken..."))
    with open(tmp_fp) as fh:
        data = json.load(fh)
    xy_in = ''
    str1 = u'boundary'
    try:
        # Newer pdal versions report the boundary as GeoJSON coordinates.
        str2 = u'boundary_json'
        str3 = u'coordinates'
        coord = data[str1][str2][str3][0][0]
        for xy in coord:
            xy_in += str(xy[0]) + ',' + str(xy[1]) + '\n'
    except Exception:
        # Fall back to parsing the WKT-like 'boundary' string form.
        coord_str = str(data[str1][str1])
        coord = coord_str[coord_str.find('((') + 2:coord_str.find('))')]
        x_y = coord.split(',')
        for xy in x_y:
            xy_in += xy.rstrip().replace(' ', ',') + '\n'
    tmp_xy = grass.tempfile()
    if tmp_xy is None:
        grass.fatal("Unable to create temporary files")
    # Drop the trailing newline before writing the coordinate list.
    with open(tmp_xy, 'w') as f:
        f.write(xy_in[:-1])
    # Build the footprint polygon: lines -> boundary -> area with centroid.
    grass.run_command('v.in.lines', input=tmp_xy, output='footprint_line',
                      separator='comma')
    grass.run_command('g.region', vector='footprint_line')
    grass.run_command('v.type', input='footprint_line',
                      out='footprint_boundary', from_type='line',
                      to_type='boundary')
    grass.run_command('v.centroids', input='footprint_boundary',
                      out=footprint)
    grass.run_command('v.db.addtable', map=footprint,
                      columns='name varchar(50)')
    grass.run_command('v.db.update', map=footprint, column='name',
                      value=infile)
    # Cleaning up
    grass.message(_("Cleaning up..."))
    os.remove(tmp_fp)
    os.remove(tmp_xy)
    grass.run_command('g.remove', flags='f', type='vector',
                      name='footprint_line', quiet=True)
    grass.run_command('g.remove', flags='f', type='vector',
                      name='footprint_boundary', quiet=True)
    # metadata
    grass.run_command('v.support', map=footprint,
                      comment='in ' + os.environ['CMDLINE'])
    grass.message(_("Generating output vector map <%s>...") % footprint)
def check_requirements():
    """Abort with a fatal error unless the external mdenoise tool is on PATH."""
    # mdenoise
    if grass.find_program("mdenoise"):
        return
    grass.fatal(
        _("mdenoise required. Follow instructions in html manual page to install it (g.manual r.denoise)."
          ))
def __init__(self, giface, parent):
    """Build the Tangible Landscape dialog: load settings, create the
    Scanning/Output/Analyses/Drawing/Activities notebook pages, the
    Start/Stop/Pause/Scan-once button row, and wire all events.

    :param giface: GRASS GUI interface object (used for error output)
    :param parent: parent wx window
    """
    wx.Dialog.__init__(self, parent, title="Tangible Landscape", style=wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER)
    self.giface = giface
    self.parent = parent
    # Warn early if the scanning backend module is missing.
    if not gscript.find_program('r.in.kinect'):
        self.giface.WriteError("ERROR: Module r.in.kinect not found.")
    self.settings = {}
    UserSettings.ReadSettingsFile(settings=self.settings)
    # for the first time
    if not 'tangible' in self.settings:
        self.settings['tangible'] = {'calibration': {'matrix': None},
                                     'analyses': {'file': None,
                                                  'contours': None,
                                                  'contours_step': 1}
                                     }
    self.calib_matrix = self.settings['tangible']['calibration']['matrix']
    self.delay = 0.3          # polling delay (seconds) for the scan loop
    self.process = None       # handle of the running r.in.kinect process
    self.observer = None
    self.timer = wx.Timer(self)
    self.changedInput = False  # set True whenever any panel changes settings
    self.filter = {'filter': False, 'counter': 0, 'threshold': 0.1, 'debug': False}
    # to be able to add params to runAnalyses from outside
    self.additionalParams4Analyses = {}
    self.notebook = wx.Notebook(self)
    self.scanning_panel = ScanningPanel(self.notebook, self.giface, self.settings['tangible'], scaniface=self)
    self.notebook.AddPage(self.scanning_panel, "Scanning")
    # NOTE(review): the first-run defaults above contain no 'scan' key;
    # presumably ScanningPanel inserts it into settings — TODO confirm.
    self.scan = self.settings['tangible']['scan']
    self.outputPanel = OutputPanel(self.notebook, self.giface, self.settings['tangible'])
    self.notebook.AddPage(self.outputPanel, "Output")
    self.scanning_panel.settingsChanged.connect(lambda: setattr(self, 'changedInput', True))
    analyses_panel = AnalysesPanel(self.notebook, self.giface, self.settings['tangible'], scaniface=self)
    self.notebook.AddPage(analyses_panel, "Analyses")
    analyses_panel.settingsChanged.connect(lambda: setattr(self, 'changedInput', True))
    self.outputPanel.settingsChanged.connect(lambda: setattr(self, 'changedInput', True))
    self.drawing_panel = DrawingPanel(self.notebook, self.giface, self.settings['tangible'])
    self.notebook.AddPage(self.drawing_panel, "Drawing")
    self.drawing_panel.Bind(EVT_UPDATE_GUI, self.OnUpdate)
    self.drawing_panel.settingsChanged.connect(lambda: setattr(self, 'changedInput', True))
    self.activities_panel = ActivitiesPanel(self.notebook, self.giface, self.settings['tangible'], scaniface=self)
    self.notebook.AddPage(self.activities_panel, "Activities")
    btnStart = wx.Button(self, label="Start")
    btnStop = wx.Button(self, label="Stop")
    btnPause = wx.Button(self, label="Pause")
    self.btnPause = btnPause
    btnScanOnce = wx.Button(self, label="Scan once")
    btnHelp = wx.Button(self, label="Help")
    btnClose = wx.Button(self, label="Close")
    self.status = wx.StaticText(self)
    # bind events
    btnStart.Bind(wx.EVT_BUTTON, lambda evt: self.Start())
    btnStop.Bind(wx.EVT_BUTTON, lambda evt: self.Stop())
    btnPause.Bind(wx.EVT_BUTTON, lambda evt: self.Pause())
    btnScanOnce.Bind(wx.EVT_BUTTON, self.ScanOnce)
    btnHelp.Bind(wx.EVT_BUTTON, self.OnHelp)
    btnClose.Bind(wx.EVT_BUTTON, self.OnClose)
    self.Layout()
    # Layout: button row, status line, notebook, then Help/Close row.
    sizer = wx.BoxSizer(wx.VERTICAL)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.Add(btnStart, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnStop, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnPause, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnScanOnce, flag=wx.EXPAND | wx.ALL, border=5)
    sizer.Add(hSizer, 0, wx.ALL | wx.EXPAND, 5)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.Add(self.status, flag=wx.EXPAND | wx.LEFT, border=5)
    sizer.Add(hSizer)
    sizer.Add(self.notebook, 1, wx.ALL | wx.EXPAND, 5)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.AddStretchSpacer()
    hSizer.Add(btnHelp, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnClose, flag=wx.EXPAND | wx.ALL, border=5)
    sizer.Add(hSizer, flag=wx.EXPAND)
    self.SetSizer(sizer)
    sizer.Fit(self)
    self.SetMinSize(self.GetBestSize())
    self.Layout()
    self.Bind(wx.EVT_TIMER, self.RestartIfNotRunning, self.timer)
    self.Bind(wx.EVT_CLOSE, self.OnClose)
    self.Bind(EVT_UPDATE_GUI, self.OnUpdate)
    self.Bind(EVT_ADD_LAYERS, self.OnAddLayers)
    self.Bind(EVT_REMOVE_LAYERS, self.OnRemoveLayers)
    self.Bind(EVT_CHECK_LAYERS, self.OnCheckLayers)
    self.Bind(EVT_SELECT_LAYERS, self.OnSelectLayers)
    self.Bind(EVT_CHANGE_LAYER, self.OnChangeLayer)
    # pause/resume state flags for the scanning loop
    self.pause = None
    self.resume_once = None
def __init__(self, giface, parent):
    """Build the (simpler) Tangible Landscape dialog: load settings with
    full first-run defaults, create Scanning and Analyses notebook pages,
    the Start/Stop/Calibrate/Scan-once button row, and wire events.

    :param giface: GRASS GUI interface object (used for error/warning output)
    :param parent: parent wx window
    """
    wx.Dialog.__init__(self, parent, title="Tangible Landscape", style=wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER)
    self.giface = giface
    self.parent = parent
    # Warn early if the scanning backend module is missing.
    if not gscript.find_program('r.in.kinect'):
        self.giface.WriteError("ERROR: Module r.in.kinect not found.")
    self.settings = {}
    UserSettings.ReadSettingsFile(settings=self.settings)
    # for the first time
    if not 'tangible' in self.settings:
        # Default scanning parameters for a fresh installation.
        self.settings['tangible'] = {'calibration': {'matrix': None},
                                     'analyses': {'file': None,
                                                  'contours': None,
                                                  'contours_step': 1},
                                     'scan': {'scan_name': 'scan',
                                              'elevation': '', 'region': '',
                                              'zexag': 1., 'smooth': 7,
                                              'numscans': 1,
                                              'rotation_angle': 180,
                                              'resolution': 2,
                                              'trim_nsewtb': '30,30,30,30,60,100',
                                              'interpolate': False,
                                              'trim_tolerance': 0.7,
                                              'equalize': False
                                              },
                                     'export': {'active': True,
                                                'file': ''
                                                }
                                     }
    self.scan = self.settings['tangible']['scan']
    self.calib_matrix = self.settings['tangible']['calibration']['matrix']
    if not self.calib_matrix:
        giface.WriteWarning("WARNING: No calibration file exists")
    self.delay = 0.3          # polling delay (seconds) for the scan loop
    self.process = None       # handle of the running r.in.kinect process
    self.observer = None
    self.timer = wx.Timer(self)
    self.changedInput = False  # set True whenever any panel changes settings
    self.notebook = wx.Notebook(self)
    scanning_panel = ScanningPanel(self.notebook, self.giface, self.settings['tangible'])
    self.notebook.AddPage(scanning_panel, "Scanning")
    scanning_panel.settingsChanged.connect(lambda: setattr(self, 'changedInput', True))
    analyses_panel = AnalysesPanel(self.notebook, self.giface, self.settings['tangible'])
    self.notebook.AddPage(analyses_panel, "Analyses")
    btnStart = wx.Button(self, label="Start")
    btnStop = wx.Button(self, label="Stop")
    btnScanOnce = wx.Button(self, label="Scan once")
    btnCalibrate = wx.Button(self, label="Calibrate")
    btnHelp = wx.Button(self, label="Help")
    btnClose = wx.Button(self, label="Close")
    self.status = wx.StaticText(self)
    # bind events
    btnStart.Bind(wx.EVT_BUTTON, lambda evt: self.Start())
    btnStop.Bind(wx.EVT_BUTTON, lambda evt: self.Stop())
    btnCalibrate.Bind(wx.EVT_BUTTON, self.Calibrate)
    btnScanOnce.Bind(wx.EVT_BUTTON, self.ScanOnce)
    btnHelp.Bind(wx.EVT_BUTTON, self.OnHelp)
    btnClose.Bind(wx.EVT_BUTTON, self.OnClose)
    self.Layout()
    # Layout: button row, status line, notebook, then Help/Close row.
    sizer = wx.BoxSizer(wx.VERTICAL)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.Add(btnStart, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnStop, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnCalibrate, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnScanOnce, flag=wx.EXPAND | wx.ALL, border=5)
    sizer.Add(hSizer, 0, wx.ALL | wx.EXPAND, 5)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.Add(self.status, flag=wx.EXPAND | wx.LEFT, border=5)
    sizer.Add(hSizer)
    sizer.Add(self.notebook, 1, wx.ALL | wx.EXPAND, 5)
    hSizer = wx.BoxSizer(wx.HORIZONTAL)
    hSizer.AddStretchSpacer()
    hSizer.Add(btnHelp, flag=wx.EXPAND | wx.ALL, border=5)
    hSizer.Add(btnClose, flag=wx.EXPAND | wx.ALL, border=5)
    sizer.Add(hSizer, flag=wx.EXPAND)
    self.SetSizer(sizer)
    sizer.Fit(self)
    self.SetMinSize(self.GetBestSize())
    self.Layout()
    self.Bind(wx.EVT_TIMER, self.RestartIfNotRunning, self.timer)
    self.Bind(wx.EVT_CLOSE, self.OnClose)
    self.Bind(EVT_UPDATE_GUI, self.OnUpdate)
def main():
    """Satellite-derived bathymetry (SDB) estimation.

    Water-corrects each optical band against the SWIR band (deep-water
    regression + log linearization), then regresses the corrected bands on
    rasterized calibration depth points using either the r.gwr addon
    (fixed GWR, -f flag) or an adaptive GWR run through the R GWmodel
    package, and finally clips the depth estimate to the calibration range.
    """
    options, flags = g.parser()
    Blue = options["blue_band"]
    Green = options["green_band"]
    Red = options["red_band"]
    NIR = options["nir_band"]
    SWIR = options["band_for_correction"]
    Calibration_points = options["calibration_points"]
    Area_of_interest = options["area_of_interest"]
    Additional_band1 = options["additional_band1"]
    Additional_band2 = options["additional_band2"]
    Additional_band3 = options["additional_band3"]
    Additional_band4 = options["additional_band4"]
    bathymetry = options["depth_estimate"]
    tide_height = options["tide_height"]
    calibration_column = options["calibration_column"]
    bisquare = flags["b"]
    fixed_GWR = flags["f"]
    res = g.parse_command("g.region", raster=Green, flags="g")
    g.run_command(
        "v.to.rast",
        input=Calibration_points,
        type="point",
        use="attr",
        attribute_column=calibration_column,
        output="tmp_Calibration_points",
    )
    # hull generation from calibration depth points
    g.run_command("v.hull", input=Calibration_points, output="tmp_hull",
                  overwrite=True)
    # buffer the hull to ceate a region for including all calibration points
    g.run_command(
        "v.buffer",
        input="tmp_hull",
        output="tmp_buffer",
        distance=float(res["nsres"]),
        overwrite=True,
    )
    if tide_height:
        # Shift calibration depths by the tide height, respecting the sign
        # convention of the depth values (positive-down vs negative-down).
        cal = g.parse_command("r.univar", map="tmp_Calibration_points",
                              flags="g")
        if float(cal["min"]) >= 0:
            t = float(tide_height)
            g.mapcalc(
                exp="{d}=({d}+float({t}))".format(d="tmp_Calibration_points",
                                                  t=t),
                overwrite=True,
            )
        if float(cal["min"]) < 0:
            t = float(tide_height) * -1
            g.mapcalc(
                exp="{d}=({d}+float({t}))".format(d="tmp_Calibration_points",
                                                  t=t),
                overwrite=True,
            )
    # Water mask: Green/SWIR ratio >= 1 combined with NDVI < 0 marks water.
    g.mapcalc(exp="{tmp_ratio}=({Green}/{SWIR})".format(
        tmp_ratio="tmp_ratio", Green=Green, SWIR=SWIR))
    g.mapcalc(exp="{tmp_NDVI}=float({NIR}-{Red})/float({NIR}+{Red})".format(
        tmp_NDVI="tmp_NDVI", NIR=NIR, Red=Red))
    g.mapcalc(
        exp="{tmp_water}=if({tmp_ratio} < 1, null(), if({tmp_NDVI} <"
        "0, {tmp_ratio}, null()))".format(
            tmp_NDVI="tmp_NDVI", tmp_water="tmp_water",
            tmp_ratio="tmp_ratio"))
    g.run_command("r.mask", raster="tmp_water", overwrite=True)
    li = [
        Green,
        Additional_band1,
        Additional_band2,
        Additional_band3,
        Additional_band4,
        Blue,
        Red,
    ]
    # NOTE(review): crctd_lst is appended to below but not initialized here —
    # presumably a module-level list defined elsewhere in the file; confirm.
    for i in li:
        j, sep, tail = i.partition("@")
        tmp_ = RasterRow(str(i))
        if tmp_.exist() is False:
            # skip optional bands that were not supplied
            continue
        g.message("Ditermining minimum value for %s" % i)
        g.run_command("g.region", vector=Calibration_points)
        # To ignore zero values
        g.mapcalc(
            exp="{tmp_b}=if({x}>1, {x},null())".format(tmp_b="tmp_b",
                                                       x=str(i)),
            overwrite=True,
        )
        tmp_AOI = g.parse_command("r.univar", map="tmp_b", flags="g")
        tmp_AOI_min = float(tmp_AOI["min"])
        g.run_command("g.region", raster=Green)
        try:
            # Pixels darker than the minimum near the calibration points are
            # treated as optically deep water for the SWIR regression.
            g.mapcalc(
                exp="{tmp_deep}=if({tmp_band}<{band_min}, {tmp_band},"
                "null())".format(tmp_deep="tmp_deep", band_min=tmp_AOI_min,
                                 tmp_band=str(i)),
                overwrite=True,
            )
            g.run_command("r.mask", raster="tmp_deep", overwrite=True)
            tmp_coe = g.parse_command("r.regression.line",
                                      mapx=SWIR,
                                      mapy=str(i),
                                      flags="g")
            g.message("Deep water ditermination for %s" % i)
            if Area_of_interest:
                g.run_command("r.mask", vector=Area_of_interest,
                              overwrite=True)
                g.run_command("g.region", vector=Area_of_interest)
            else:
                g.run_command("r.mask", vector="tmp_buffer", overwrite=True)
                g.run_command("g.region", vector=Calibration_points)
            # Log-linearize the water-column corrected radiance.
            g.mapcalc(
                exp="{tmp_crct}=log({tmp_band}-{a}-{b}*{SWIR})".format(
                    tmp_crct="tmp_crct" + str(j),
                    tmp_band=str(i),
                    a=float(tmp_coe["a"]),
                    b=float(tmp_coe["b"]),
                    SWIR=SWIR,
                ),
                overwrite=True,
            )
            g.run_command("r.mask", raster="tmp_water", overwrite=True)
            g.mapcalc("{tmp_crctd} = ({tmp_crct} * 1)".format(
                tmp_crct="tmp_crct" + str(j),
                tmp_crctd="tmp_crctd" + str(j)))
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            # NOTE(review): this branch reuses tmp_coe, which is unbound when
            # the regression call above is what raised — TODO confirm intent.
            g.message("Cannot find deep water pixels")
            if Area_of_interest:
                g.run_command("r.mask", vector=Area_of_interest,
                              overwrite=True)
                g.run_command("g.region", vector=Area_of_interest)
            else:
                g.run_command("r.mask", vector="tmp_buffer", overwrite=True)
                g.run_command("g.region", vector=Calibration_points)
            g.mapcalc(
                exp="{tmp_crct}=log({tmp_band}-{a}-{b}*{SWIR})".format(
                    tmp_crct="tmp_crct" + str(j),
                    tmp_band=str(i),
                    a=float(tmp_coe["a"]),
                    b=float(tmp_coe["b"]),
                    SWIR=SWIR,
                ),
                overwrite=True,
            )
            g.run_command("r.mask", raster="tmp_water", overwrite=True)
            g.mapcalc("{tmp_crctd} = ({tmp_crct} * 1)".format(
                tmp_crct="tmp_crct" + str(j),
                tmp_crctd="tmp_crctd" + str(j)))
        crctd_lst.append("tmp_crctd" + str(j))
    if fixed_GWR:
        # Fixed-bandwidth GWR via the r.gwr GRASS addon.
        if not g.find_program("r.gwr", "--help"):
            g.run_command("g.extension", extension="r.gwr")
        if bisquare:
            g.message("Calculating optimal bandwidth using bisqare kernel...")
            bw = g.parse_command(
                "r.gwr",
                mapx=crctd_lst,
                mapy="tmp_Calibration_points",
                kernel="bisquare",
                flags="ge",
            )
            g.message("Running Fixed-GWR using bisqare kernel...")
            g.run_command(
                "r.gwr",
                mapx=crctd_lst,
                mapy="tmp_Calibration_points",
                estimates="tmp_bathymetry",
                kernel="bisquare",
                bandwidth=int(bw["estimate"]),
            )
        else:
            g.message("Calculating optimal bandwidth using gaussian kernel...")
            bw = g.parse_command("r.gwr",
                                 mapx=crctd_lst,
                                 mapy="tmp_Calibration_points",
                                 flags="ge")
            g.message("Running Fixed-GWR using gaussian kernel...")
            g.run_command(
                "r.gwr",
                mapx=crctd_lst,
                mapy="tmp_Calibration_points",
                estimates="tmp_bathymetry",
                bandwidth=int(bw["estimate"]),
            )
    else:
        global r
        global predict
        try:
            # Adaptive GWR: generate an R script driving the GWmodel package.
            # NOTE(review): r_file is only closed on the success path; an
            # exception here leaks the handle until GC — consider `with`.
            r = g.tempfile()
            r_file = open(r, "w")
            libs = ["GWmodel", "data.table", "rgrass7", "rgdal", "raster"]
            for i in libs:
                # Install each R dependency on demand, then load it.
                install = 'if(!is.element("%s", installed.packages()[,1])){\n' % i
                install += "cat('\\n\\nInstalling %s package from CRAN\n')\n" % i
                install += "if(!file.exists(Sys.getenv('R_LIBS_USER'))){\n"
                install += "dir.create(Sys.getenv('R_LIBS_USER'), recursive=TRUE)\n"
                install += ".libPaths(Sys.getenv('R_LIBS_USER'))}\n"
                install += ('install.packages("%s", repos="http://cran.us.r-'
                            'project.org")}\n' % i)
                r_file.write(install)
                libraries = "library(%s)\n" % i
                r_file.write(libraries)
            Green_new, sep, tail = Green.partition("@")
            # Prediction data frame: all corrected bands over the full region.
            r_file.write('grass_file = readRAST("tmp_crctd%s")\n' % Green_new)
            r_file.write("raster_file = raster(grass_file)\n")
            frame_file = "pred = as.data.frame(raster_file,na.rm = TRUE,xy = TRUE)\n"
            r_file.write(frame_file)
            for i in li:
                j, sep, tail = i.partition("@")
                Green_new, sep, tail = Green.partition("@")
                tmp_ = RasterRow(str(i))
                if tmp_.exist() is False:
                    continue
                r_file.write('grass_file = readRAST("tmp_crctd%s")\n' % j)
                r_file.write("raster_file = raster(grass_file)\n")
                r_file.write(
                    "frame_pred%s = as.data.frame(raster_file, na.rm = TRUE,"
                    "xy = TRUE)\n" % j)
                pred_file = "frame_pred_green=data.frame(frame_pred%s)\n" % Green_new
                pred_file += "pred=merge(pred, frame_pred%s)\n" % j
                r_file.write(pred_file)
            # For reference_file repeat with MASK
            g.run_command("r.mask", raster="tmp_Calibration_points",
                          overwrite=True)
            r_file.write('grass_file=readRAST("%s")\n' % "tmp_Calibration_points")
            r_file.write("raster_file = raster(grass_file)\n")
            frame_file = ("calib = as.data.frame(raster_file,na.rm = TRUE ,"
                          "xy = TRUE)\n")
            r_file.write(frame_file)
            for i in li:
                j, sep, tail = i.partition("@")
                tmp_ = RasterRow(str(i))
                if tmp_.exist() is False:
                    continue
                r_file.write('grass_file = readRAST("tmp_crctd%s")\n' % j)
                r_file.write("raster_file = raster(grass_file)\n")
                r_file.write(
                    "frame_ref%s = as.data.frame(raster_file,na.rm = TRUE,"
                    "xy = TRUE)\n" % j)
                ref_file = "calib = merge(calib, frame_ref%s)\n" % j
                r_file.write(ref_file)
            g.run_command("g.remove", type="raster", pattern="MASK", flags="f")
            ref_file = "Rapid_ref.sdf=SpatialPointsDataFrame(calib[,1:2],calib)\n"
            ref_file += "Rapid_pred.sdf=SpatialPointsDataFrame(pred[,1:2],"  "pred)\n"
            ref_file += ("DM_Rapid_ref.sdf=gw.dist(dp.locat=coordinates"
                         "(Rapid_ref.sdf))\n")
            r_file.write(ref_file)
            l = []
            predict = g.read_command("g.tempfile", pid=os.getpid()).strip() + ".txt"
            # Join the corrected bands in to a string
            le = len(crctd_lst)
            for i in crctd_lst:
                l.append(i)
            k = "+".join(l)
            if bisquare:
                ref_flag = ("cat('\nCalculating optimal bandwidth using "
                            "bisquare kernel..\n')\n")
                ref_flag += (
                    "BW_Rapid_ref.sdf=bw.gwr(tmp_Calibration_points~%s,"
                    'data=Rapid_ref.sdf, kernel="bisquare",'
                    "adaptive=TRUE, dMat=DM_Rapid_ref.sdf)\n" % k)
                ref_flag += "cat('\nCalculating euclidean distance\n')\n"
                ref_flag += ("DM_Rapid_pred.sdf=gw.dist(dp.locat=coordinates"
                             "(Rapid_ref.sdf), rp.locat=coordinates"
                             "(Rapid_pred.sdf))\n")
                ref_flag += "cat('\nRunning A-GWR using bisquare kernel\n')\n"
                ref_flag += (
                    "GWR_Rapid_pred.sdf=gwr.predict(tmp_Calibration_poi"
                    "nts~%s,data=Rapid_ref.sdf, bw = BW_Rapid_ref.sdf,"
                    'predictdata = Rapid_pred.sdf, kernel = "bisquare",'
                    "adaptive = TRUE, dMat1 = DM_Rapid_pred.sdf,"
                    "dMat2 = DM_Rapid_ref.sdf)\n" % k)
                r_file.write(ref_flag)
            if not bisquare:
                ref_fla = ("cat('\nCalculating optimal bandwidth using "
                           "gaussian kernel..\n')\n")
                ref_fla += (
                    "BW_Rapid_ref.sdf=bw.gwr(tmp_Calibration_points~%s,"
                    'data=Rapid_ref.sdf, kernel="gaussian",'
                    "adaptive=TRUE, dMat= DM_Rapid_ref.sdf)\n" % k)
                ref_fla += "cat('\nCalculating euclidean distance\n')\n"
                ref_fla += ("DM_Rapid_pred.sdf=gw.dist(dp.locat=coordinates"
                            "(Rapid_ref.sdf), rp.locat=coordinates"
                            "(Rapid_pred.sdf))\n")
                ref_fla += "cat('\nRunning A-GWR using gaussian kernel\n')\n"
                ref_fla += (
                    "GWR_Rapid_pred.sdf = gwr.predict(tmp_Calibration_poi"
                    "nts~%s,data=Rapid_ref.sdf, bw=BW_Rapid_ref.sdf,"
                    'predictdata = Rapid_pred.sdf, kernel = "gaussian",'
                    "adaptive = TRUE, dMat1 = DM_Rapid_pred.sdf,"
                    "dMat2 = DM_Rapid_ref.sdf)\n" % k)
                r_file.write(ref_fla)
            ref_fil = "Sp_frame = as.data.frame(GWR_Rapid_pred.sdf$SDF)\n"
            r_file.write(ref_fil)
            export = 'write.table(Sp_frame, quote=FALSE, sep=",",' '"%s")\n' % predict
            r_file.write(export)
            r_file.close()
            subprocess.check_call(["Rscript", r], shell=False)
            # Import the prediction column back; x/y/prediction column
            # positions depend on the number of corrected bands (le).
            g.run_command(
                "r.in.xyz",
                input=predict,
                output="tmp_bathymetry",
                skip=1,
                separator=",",
                x=(int(le) + 5),
                y=(int(le) + 6),
                z=(int(le) + 3),
                overwrite=True,
            )
        except subprocess.CalledProcessError:
            # R run failed: fall back to Fixed-GWR via the r.gwr addon.
            g.message("Integer outflow... ")
            if not g.find_program("r.gwr", "--help"):
                g.run_command("g.extension", extension="r.gwr")
            if bisquare:
                g.message("Running Fixed-GWR using bisqare kernel...")
                bw = g.parse_command(
                    "r.gwr",
                    mapx=crctd_lst,
                    mapy="tmp_Calibration_points",
                    kernel="bisquare",
                    flags="ge",
                )
                g.run_command(
                    "r.gwr",
                    mapx=crctd_lst,
                    mapy="tmp_Calibration_points",
                    estimates="tmp_bathymetry",
                    kernel="bisquare",
                    bandwidth=int(bw["estimate"]),
                )
            else:
                g.message("Running Fixed-GWR using gaussian kernel...")
                bw = g.parse_command("r.gwr",
                                     mapx=crctd_lst,
                                     mapy="tmp_Calibration_points",
                                     flags="ge")
                g.run_command(
                    "r.gwr",
                    mapx=crctd_lst,
                    mapy="tmp_Calibration_points",
                    estimates="tmp_bathymetry",
                    bandwidth=int(bw["estimate"]),
                )
    # Clip the estimate to the observed calibration depth range.
    tmp_rslt_ext = g.parse_command("r.univar", map="tmp_Calibration_points",
                                   flags="g")
    g.mapcalc(exp="{bathymetry}=if({tmp_SDB}>{max_}, null(),"
              "if({tmp_SDB}<{min_}, null(), {tmp_SDB}))".format(
                  tmp_SDB="tmp_bathymetry",
                  bathymetry=bathymetry,
                  max_=float(tmp_rslt_ext["max"]),
                  min_=float(tmp_rslt_ext["min"]),
              ))