def main():
    """Redraw the selected monitor by replaying its command file.

    Reads the monitor's command file, erases the display, re-runs each
    stored command, then restores the command file (which the replay may
    have modified). Returns 0 on success; exits via grass.fatal on error.
    """
    mon = grass.gisenv().get('MONITOR', None)
    if not mon:
        grass.fatal(_("No graphics device selected. Use d.mon to select graphics device."))

    monCmd = grass.parse_command('d.mon', flags='g').get('cmd', None)
    if not monCmd or not os.path.isfile(monCmd):
        grass.fatal(_("Unable to open file '%s'") % monCmd)

    try:
        # context manager guarantees the handle is closed; the original
        # called fd.close() after the except block, where fd could be
        # unbound if open() itself had raised
        with open(monCmd, 'r') as fd:
            cmdList = fd.readlines()

        grass.run_command('d.erase')

        for cmd in cmdList:
            grass.call(split(cmd))
    except IOError as e:
        grass.fatal(_("Unable to open file '%s' for reading. Details: %s") %
                    (monCmd, e))

    # restore cmd file (replaying the commands may have appended to it)
    try:
        with open(monCmd, "w") as fd:
            fd.writelines(cmdList)
    except IOError as e:
        grass.fatal(_("Unable to open file '%s' for writing. Details: %s") %
                    (monCmd, e))

    return 0
def __del__(self):
    """Flush digitized objects to the poly file and rasterize them.

    Only acts when self.saveMap is set: writes each object as an AREA or
    LINE record with its coordinates and category, closes the poly file,
    runs r.in.poly on it and removes the temporary file.
    """
    Debug.msg(1, "IRDigit.__del__()")
    if self.saveMap:
        for obj in self.objects:
            # AREA and LINE records share the same coordinate format;
            # the duplicated loop was factored into _writeCoords
            if obj.ftype == GV_BOUNDARY:
                self.polyfile.write("AREA\n")
                self._writeCoords(obj.coords)
            elif obj.ftype == GV_LINE:
                self.polyfile.write("LINE\n")
                self._writeCoords(obj.coords)
            catbuf = "=%d a\n" % (obj.catId)
            self.polyfile.write(catbuf)
        self.polyfile.close()

        region_settings = grass.parse_command('g.region', flags='p',
                                              delimiter=':')
        RunCommand('r.in.poly',
                   input=self.polyfile.name,
                   rows=region_settings['rows'],
                   output=self.getOutputName(),
                   overwrite=True)
        os.unlink(self.polyfile.name)

def _writeCoords(self, coords):
    """Write one ' <east> <north>' line per coordinate pair."""
    for east, north in coords:
        self.polyfile.write(" %s %s\n" % (east, north))
def difference(real_elev, scanned_elev, new, zexag=1, env=None):
    """Compute difference and set color table using standard deviations"""
    resampled = 'tmp_resampled'
    gcore.run_command('r.resamp.interp', input=real_elev, output=resampled,
                      method='bilinear', env=env)
    grast.mapcalc(f"{new} = {resampled} - {scanned_elev}", env=env)
    stats = gcore.parse_command("r.univar", flags="g", map=real_elev, env=env)
    # color breakpoints at 1x, 2x and 3x the (zexag-scaled) std deviation
    sigma1 = zexag * float(stats["stddev"])
    sigma2 = zexag * 2 * sigma1
    sigma3 = zexag * 3 * sigma1
    rules = "\n".join([
        "-1000000 black",
        f"-{sigma3} black",
        f"-{sigma2} 202:000:032",
        f"-{sigma1} 244:165:130",
        "0 247:247:247",
        f"{sigma1} 146:197:222",
        f"{sigma2} 5:113:176",
        f"{sigma3} black",
        "1000000 black",
    ])
    gcore.write_command("r.colors", map=new, rules="-", stdin=rules, env=env)
def compute_a(threshold, dtm, stream, a_river, acc):
    """
    Compute the area of the basin for each pixel of stream,
    the accumulation map and the new streams
    """
    pid = os.getpid()
    region = gcore.parse_command('g.region', flags='pgm')
    # cell area in m^2 from the current region resolution
    cell_area = float(region['nsres']) * float(region['ewres'])
    raw_stream = 'tmprgreen_%i_stream' % pid
    thin_stream = 'tmprgreen_%i_tmpstream' % pid
    gcore.run_command('r.watershed', elevation=dtm, threshold=threshold,
                      accumulation=acc, stream=raw_stream, memory=3000)
    gcore.run_command('r.thin', input=raw_stream, output=thin_stream)
    gcore.run_command('r.to.vect', flags='v', overwrite=True,
                      input=thin_stream, output=stream, type='line')
    # |cells| * cell_area / 1e6 -> basin area in km^2
    expr = "%s = abs(%s)/1000000.0 * %f" % (a_river, acc, cell_area)
    mapcalc(expr, overwrite=True)
def difference(real_elev, scanned_elev, new, env):
    """!Computes difference of original and scanned (scan - orig)."""
    regression = 'regression'
    params = gcore.parse_command('r.regression.line', flags='g',
                                 mapx=scanned_elev, mapy=real_elev, env=env)
    # fit the scan onto the original elevation, then subtract the original
    fit_expr = '{regression} = {a} + {b} * {before}'.format(
        a=params['a'], b=params['b'], before=scanned_elev,
        regression=regression)
    gcore.run_command('r.mapcalc', expression=fit_expr, env=env)
    diff_expr = '{difference} = {regression} - {after}'.format(
        regression=regression, after=real_elev, difference=new)
    gcore.run_command('r.mapcalc', expression=diff_expr, env=env)
    gcore.run_command('r.colors', map=new, color='differences', env=env)
def _search_map(keywords, AND, maptype, info_cmd, flags):
    """Search maps of *maptype* whose metadata match *keywords*.

    Runs *info_cmd* (e.g. r.info/v.info) with *flags* on every map listed
    by g.list and keeps those where check() accepts a metadata value.

    Returns a list of {'name': map, 'attributes': {param: value}} dicts.
    """
    result = []
    for mapfile in grass.parse_command('g.list', type=maptype):
        try:
            # iterate items() directly; the original bound the exception
            # to an unused name and indexed the dict per key
            metadata = grass.parse_command(info_cmd, map=mapfile, flags=flags)
            for param, value in metadata.items():
                if check(keywords, value, AND):
                    result.append({
                        'name': mapfile,
                        'attributes': {param: value}
                    })
        except CalledModuleError:
            # best effort: skip maps whose metadata cannot be read
            pass
    return result
def classify_colors(new, group, compactness=2, threshold=0.3, minsize=10,
                    useSuperPixels=True, env=None):
    """Segment the imagery group, classify with i.smap, filter by the
    goodness-of-fit raster and write the per-segment mode into *new*."""
    segment = 'tmp_segment'
    segment_clump = 'tmp_segment_clump'
    # the signature file is expected under this fixed name
    signature = 'signature'
    classification = 'tmp_classification'
    filtered_classification = 'tmp_filtered_classification'
    reject = 'tmp_reject'

    if useSuperPixels:
        try:
            gcore.run_command('i.superpixels.slic', input=group,
                              output=segment, compactness=compactness,
                              minsize=minsize, env=env)
        except CalledModuleError as e:
            print('i.superpixels.slic failed')
            print(e)
    else:
        gcore.run_command('i.segment', group=group, output=segment,
                          threshold=threshold, minsize=minsize, env=env)
        gcore.run_command('r.clump', input=segment, output=segment_clump,
                          env=env)

    gcore.run_command('i.smap', group=group, subgroup=group,
                      signaturefile=signature, output=classification,
                      goodness=reject, env=env)
    # keep only cells whose goodness value is below the 90th percentile
    univar = gcore.parse_command('r.univar', flags='ge', map=reject, env=env)
    percentile = float(univar['percentile_90'])
    grast.mapcalc('{new} = if({classif} < {thres}, {classif}, null())'.format(
        new=filtered_classification, classif=classification,
        thres=percentile), env=env)
    segments = segment if useSuperPixels else segment_clump
    gcore.run_command('r.mode', base=segments, cover=filtered_classification,
                      output=new, env=env)
def firsttimeGRASS(infiles, adminfile, maskfile):
    """
    Run a maxlikelihood unsupervised classification on the data

    nclasses: number of expected classes
    infiles: list of raster files to import and process
    firstime: if firsttime, it will import all files in GRASS
    """
    from grass_session import Session
    from grass.script import core as gcore
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.pygrass.modules.shortcuts import vector as v
    from grass.pygrass.modules.shortcuts import general as g
    from grass.pygrass.modules.shortcuts import imagery as i

    # New location from an EPSG code (a GeoTIFF/SHP/... path also works)
    with Session(gisdb="/tmp", location="loc", create_opts="EPSG:4326"):
        # First run: import everything and set up the mask.
        # Administrative boundary first
        #v.import_(input=adminfile,output="admin",quiet=True,superquiet=True)
        gcore.parse_command("v.import", input=adminfile, output="admin",
                            quiet=True)
        # Match the computational region to the boundary
        g.region(flags="s", vector="admin", quiet=True)
        outmf = maskfile.split("/")[-1]  # output name = bare file name
        r.in_gdal(input=maskfile, output=outmf, quiet=True)
        r.mask(raster=outmf, maskcats="0", quiet=True)
        # Align the computational resolution with the mask pixel size
        g.region(flags="s", raster=outmf, quiet=True)
        for f in infiles:
            outf = f.split("/")[-1]  # output name = bare file name
            # Landsat files not in geographic lat/long need reprojection
            # on import, hence r.import
            #r.import_(input=f,output=outf,quiet=True)
            gcore.parse_command("r.import", input=f, output=outf, quiet=True)
            # Add the imported band to the imagery group
            i.group(group="l8", subgroup="l8", input=outf, quiet=True)
def get_coo(raster, i, j):
    """
    Given a raster map and the indices i and j of the corresponding array
    (obtained with RasterRow), compute the coordinates p_x and p_y.
    """
    meta = gcore.parse_command('r.info', flags='g', map=raster)
    north = float(meta['north'])
    west = float(meta['west'])
    nsres = float(meta['nsres'])
    ewres = float(meta['ewres'])
    # row i counts down from the north edge, column j right from the west
    # edge; the half-resolution term lands on the cell centre
    p_y = north - i * nsres - 0.5 * nsres
    p_x = west + j * ewres + 0.5 * ewres
    return p_x, p_y
def match_scan(base, scan, matched, env):
    """Vertically match scan to base using linear regression"""
    fit = gcore.parse_command('r.regression.line', mapx=scan, mapy=base,
                              flags='g', env=env)
    # apply the fitted offset/gain to the scan
    expression = "{matched} = {a} + {b} * {scan}".format(
        matched=matched, scan=scan, a=fit['a'], b=fit['b'])
    grast.mapcalc(exp=expression, env=env)
def writeArea(self, coords, rasterName):
    """Rasterize the polygon *coords* into raster *rasterName*.

    Writes the polygon to a temporary r.in.poly file, rasterizes it,
    vectorizes that raster and burns value 1 into *rasterName*.
    Returns a MaskedArea built from the temporary vector's region.
    """
    # mode="w" is required: NamedTemporaryFile defaults to binary mode,
    # which raises TypeError for the str data written below on Python 3
    polyfile = tempfile.NamedTemporaryFile(mode="w", delete=False)
    polyfile.write("AREA\n")
    for coor in coords:
        east, north = coor
        point = " %s %s\n" % (east, north)
        polyfile.write(point)
    catbuf = "=%d a\n" % self.catId
    polyfile.write(catbuf)
    self.catId = self.catId + 1
    polyfile.close()
    region_settings = grass.parse_command("g.region", flags="p",
                                          delimiter=":")
    pname = polyfile.name.split("/")[-1]
    tmpraster = "rast_" + pname
    tmpvector = "vect_" + pname
    wx.BeginBusyCursor()
    wx.GetApp().Yield()
    RunCommand(
        "r.in.poly",
        input=polyfile.name,
        output=tmpraster,
        rows=region_settings["rows"],
        overwrite=True,
    )
    RunCommand("r.to.vect", input=tmpraster, output=tmpvector,
               type="area", overwrite=True)
    RunCommand("v.to.rast", input=tmpvector, output=rasterName, value=1,
               use="val")
    wx.EndBusyCursor()
    grass.use_temp_region()
    grass.run_command("g.region", vector=tmpvector)
    region = grass.region()
    marea = MaskedArea(region, rasterName)
    RunCommand("g.remove", flags="f", type="raster", name=tmpraster)
    RunCommand("g.remove", flags="f", type="vector", name=tmpvector)
    os.unlink(polyfile.name)
    return marea
def test_extract_cats(self):
    # build the reference raster with an equivalent r.mapcalc condition
    expr = ("{o} = if ({i} <= 27513 || {i} == 27529 || ({i} >= 27601 && "
            "{i} <= 27605) || {i} >= 27608, {i}, null())")
    self.runModule("r.mapcalc",
                   expression=expr.format(o=self.mapcalc_output, i=self.inp))
    # run the module under test with the matching category ranges
    self.assertModule(
        "r.extract",
        input=self.inp,
        output=self.output,
        cats="-27513,27529,27601-27605,27608-",
    )
    self.assertRasterExists(self.output, msg="Output raster does not exist")
    # compare univar statistics: they account for nulls correctly,
    # unlike a direct raster comparison
    actual = parse_command("r.univar", map=self.output, flags="g")
    reference = parse_command("r.univar", map=self.mapcalc_output, flags="g")
    self.assertDictEqual(actual, reference)
def main():
    """Append a d.out.file call to the active monitor's command file."""
    options, flags = gcore.parser()
    gisenv = gcore.gisenv()
    if 'MONITOR' in gisenv:
        # guard against a missing 'cmd' entry instead of crashing with
        # KeyError (consistent with the sibling d.what.vect wrapper)
        cmd_file = gcore.parse_command('d.mon', flags='g').get('cmd', None)
        if not cmd_file:
            gcore.fatal(_("Unable to open file '%s'") % cmd_file)
        dout_cmd = 'd.out.file'
        for param, val in options.items():
            if val:
                dout_cmd += " {param}={val}".format(param=param, val=val)
        with open(cmd_file, "a") as file_:
            file_.write(dout_cmd)
    else:
        gcore.fatal(_("No graphics device selected. Use d.mon to select graphics device."))
def dissolve_lines(in_vec, out_vec):
    """
    If more lines are present for the same category it merges them
    """
    gcore.run_command('v.clean', overwrite=True, input=in_vec,
                      output=out_vec,
                      tool='break,snap,rmdupl,rmarea,rmline,rmsa')
    categories = gcore.parse_command('v.category', input=out_vec,
                                     option='print')
    # merge every category's lines one category at a time
    for cat in categories.keys():
        gcore.run_command('v.edit', map=out_vec, tool='merge', cats=cat)
def CalcStats(imap):
    """Parse `r.univar` output for *imap* into a stats dict.

    Returns a dict with any of the keys 'min', 'max', 'sum', 'mean', 'n'
    that could be parsed from the command output.
    """
    stats = {}
    # list() is required: on Python 3 dict.keys() is a view and cannot be
    # indexed, which broke the original while/index loop
    lines = list(g.parse_command("r.univar", map=imap, quiet=True).keys())
    for line in lines:
        parts = line.split(" ")
        # match on the two-character prefix of each output line
        if line[0:2] == "mi":
            stats['min'] = float(parts[1])
        elif line[0:2] == "ma":
            stats['max'] = float(parts[1])
        elif line[0:2] == "su":
            stats['sum'] = float(parts[1])
        elif line[0:2] == "me" and len(parts) == 2:
            stats['mean'] = float(parts[1])
        elif line[0:2] == "n:":
            stats['n'] = int(parts[1])
    return stats
def check_multilines(vector):
    """Warn (via the messenger) about categories with multiple lines."""
    if '@' in vector:
        vector, mset = vector.split('@')
    else:
        mset = ''
    msgr = get_msgr()
    vec = VectorTopo(vector, mapset=mset, mode='r')
    vec.open("r")
    cats = gcore.parse_command('v.category', input=vector, option='print')
    for cat in cats.keys():
        vec.cat(int(cat), 'lines', 1)
        # if i == '28':
        #     import ipdb; ipdb.set_trace()
        if len(vec.cat(int(cat), 'lines', 1)) > 1:
            # import ipdb; ipdb.set_trace()
            msgr.warning("Multilines for the same category %s" % cat)
    vec.close()
def main():
    """Append a d.what.vect call to the active monitor's command file."""
    options, flags = gcore.parser()
    gisenv = gcore.gisenv()
    if 'MONITOR' in gisenv:
        cmd_file = gcore.parse_command('d.mon', flags='g').get('cmd', None)
        if not cmd_file:
            gcore.fatal(_("Unable to open file '%s'") % cmd_file)
        dout_cmd = 'd.what.vect'
        # items() instead of the Python-2-only iteritems(), matching the
        # other wrapper scripts in this file
        for param, val in options.items():
            if val:
                dout_cmd += " {param}={val}".format(param=param, val=val)
        with open(cmd_file, "a") as file_:
            file_.write(dout_cmd)
    else:
        gcore.fatal(_("No graphics device selected. Use d.mon to select graphics device."))
def compute_q(threshold, q_spec, q_river, dtm):
    """
    Compute the discharge along the river given the specific discharge
    """
    pid = os.getpid()
    region = gcore.parse_command('g.region', flags='pgm')
    # cell area in m^2 from the current region resolution
    cell_area = float(region['nsres']) * float(region['ewres'])
    q_cum = "tmprgreen_%i_q_cum" % pid
    gcore.run_command('r.watershed', elevation=dtm, flow=q_spec,
                      threshold=threshold, accumulation=q_cum, memory=3000)
    expr = "%s=abs(%s/1000.0* %f/1000000.0)" % (q_river, q_cum, cell_area)
    mapcalc(expr)
def main():
    """Append a d.what.vect invocation to the active monitor's cmd file."""
    options, flags = gcore.parser()
    gisenv = gcore.gisenv()
    if 'MONITOR' in gisenv:
        cmd_file = gcore.parse_command('d.mon', flags='g').get('cmd', None)
        if not cmd_file:
            gcore.fatal(_("Unable to open file '%s'") % cmd_file)
        # assemble the command string from the non-empty options
        pieces = ['d.what.vect']
        pieces.extend(" {param}={val}".format(param=p, val=v)
                      for p, v in options.items() if v)
        dout_cmd = ''.join(pieces)
        with open(cmd_file, "a") as file_:
            file_.write(dout_cmd)
    else:
        gcore.fatal(_("No graphics device selected. Use d.mon to select graphics device."))
def check_multilines(vector):
    """Emit a warning for every category mapped to more than one line."""
    vector, mset = vector.split("@") if "@" in vector else (vector, "")
    msgr = get_msgr()
    topo = VectorTopo(vector, mapset=mset, mode="r")
    topo.open("r")
    info = gcore.parse_command("v.category", input=vector, option="print")
    for cat in info.keys():
        topo.cat(int(cat), "lines", 1)
        # if i == '28':
        #     import ipdb; ipdb.set_trace()
        lines = topo.cat(int(cat), "lines", 1)
        if len(lines) > 1:
            # import ipdb; ipdb.set_trace()
            msgr.warning("Multilines for the same category %s" % cat)
    topo.close()
def change_detection(before, after, change, height_threshold,
                     cells_threshold, add, max_detected, debug, env):
    """Detect changed areas between two scans and save them as vector
    centroids in *change*.

    Fits `after` to `before` with r.regression.line, thresholds the
    residual by height, clumps the result, keeps up to *max_detected*
    clumps within *cells_threshold* (min, max cells) and converts them
    to centroids via r.volume. Temporary rasters are removed on exit.
    """
    diff_thr = 'diff_thr_' + str(uuid.uuid4()).replace('-', '')
    diff_thr_clump = 'diff_thr_clump_' + str(uuid.uuid4()).replace('-', '')

    coeff = gcore.parse_command('r.regression.line', mapx=after, mapy=before,
                                flags='g', env=env)
    grast.mapcalc('diff = {a} + {b} * {after} - {before}'.format(
        a=coeff['a'], b=coeff['b'], before=before, after=after), env=env)

    try:
        if add:
            grast.mapcalc("{diff_thr} = if(({a} + {b} * {after} - {before}) > {thr1} &&"
                          " ({a} + {b} * {after} - {before}) < {thr2}, 1, null())".format(
                              a=coeff['a'], b=coeff['b'], diff_thr=diff_thr,
                              after=after, before=before,
                              thr1=height_threshold[0],
                              thr2=height_threshold[1]), env=env)
        else:
            # BUGFIX: parenthesize the regression prediction. The original
            # expression "{before} - {a} + {b} * {after}" evaluated as
            # before - a + b*after instead of before - (a + b*after),
            # flipping the sign of the intercept term.
            grast.mapcalc("{diff_thr} = if(({before} - ({a} + {b} * {after})) > {thr}, 1, null())".format(
                diff_thr=diff_thr, a=coeff['a'], b=coeff['b'], after=after,
                before=before, thr=height_threshold), env=env)

        gcore.run_command('r.clump', input=diff_thr, output=diff_thr_clump,
                          env=env)
        stats = gcore.read_command('r.stats', flags='cn',
                                   input=diff_thr_clump, sort='desc',
                                   env=env).strip().splitlines()
        if debug:
            print('DEBUG: {}'.format(stats))
        if len(stats) > 0 and stats[0]:
            cats = []
            found = 0
            for stat in stats:
                if found >= max_detected:
                    break
                # keep clumps whose cell count is inside the allowed window
                cell_count = float(stat.split()[1])
                if cells_threshold[0] < cell_count < cells_threshold[1]:
                    # larger than specified number of cells
                    found += 1
                    cat, value = stat.split()
                    cats.append(cat)
            if cats:
                rules = ['{c}:{c}:1'.format(c=c) for c in cats]
                gcore.write_command('r.recode', input=diff_thr_clump,
                                    output=change, rules='-',
                                    stdin='\n'.join(rules), env=env)
                gcore.run_command('r.volume', flags='f', input=change,
                                  clump=diff_thr_clump, centroids=change,
                                  env=env)
            else:
                gcore.warning("No change found!")
                gcore.run_command('v.edit', map=change, tool='create',
                                  env=env)
        else:
            gcore.warning("No change found!")
            gcore.run_command('v.edit', map=change, tool='create', env=env)
        gcore.run_command('g.remove', flags='f', type=['raster'],
                          name=[diff_thr, diff_thr_clump], env=env)
    except Exception:
        # clean up temporaries; the original bare except also swallowed
        # the error, so best-effort semantics are preserved
        gcore.run_command('g.remove', flags='f', type=['raster'],
                          name=[diff_thr, diff_thr_clump], env=env)
def consulta_punto(args):
    '''
    Point query ("consulta punto").

    Takes a list of coordinates as [[lon, lat, idpix], lista].
    Output: creates one CSV file per query.
    '''
    import grass.script.core as grass
    import os
    lon = args[0][0]
    lat = args[0][1]
    id_pixel = args[0][2]
    lista = args[1]  # raster input passed straight to r.what
    # output file 'salida_<lon>_<lat>.csv' in the current working directory
    nombre = os.path.join(os.getcwd() + '/salida_' + str(lon) + '_' +
                          str(lat) + '.csv')
    ##nombre_archivo = 'salida_' + str(lon) + '_' + str(lat) + '.csv'
    ##nombre = os.path.join(ruta_salida, nombre_salida)
    archivo_salida = open(nombre, 'w')
    coordenadas = str(lon) + "," + str(lat)
    print 'procesando coordenadas --> ', coordenadas
    # NOTE(review): Python 2 code -- dict.items() is indexed below and the
    # relative order of the two r.what output rows is relied upon; confirm
    # before porting to Python 3, where dict views are not indexable
    dato = grass.parse_command('r.what', flags='n', input=lista,
                               east_north=coordenadas)
    try:
        # if the last field parses as a float the row holds numeric values
        zonda = float((dato.items()[1])[0].split('|')[-1])
    except:
        zonda = (dato.items()[1])[0].split('|')[-1]
    # pick which of the two rows is values vs. header depending on where
    # the numeric data ended up
    if isinstance(zonda, float) == True:
        valores = (dato.items()[1])[0].split('|')
        encabezado = (dato.items()[0])[0].split('|')
    elif isinstance(zonda, str) == True:
        valores = (dato.items()[0])[0].split('|')
        encabezado = (dato.items()[1])[0].split('|')
    lat = valores[0]
    lon = valores[1]
    # one CSV line per value column: lat,lon,idpix,value,header
    for i in range(3, len(valores)):
        archivo_salida.write(str(lat) + ',' + str(lon) + ',' +
                             str(id_pixel) + ',' + str(valores[i]) + ',' +
                             str(encabezado[i]) + '\n')
    archivo_salida.close()
def change_detection_area(before, after, change, height_threshold,
                          filter_slope_threshold, add, env):
    """Detects change in area. Result are areas with value equals the max
    difference between the scans as a positive value."""
    slope = 'slope_tmp_get_change'
    before_after_regression = 'before_after_regression_tmp'
    # slope is used to filter areas of change with high slope (edge of model)
    gcore.run_command('r.slope.aspect', elevation=before, slope=slope,
                      env=env)

    if add:
        after, before = before, after

    # linear regression of 'after' onto 'before'
    reg_params = gcore.parse_command('r.regression.line', flags='g',
                                     mapx=before, mapy=after, env=env)
    regression_expr = '{before_after_regression} = {a} + {b} * {before}'.format(
        a=reg_params['a'], b=reg_params['b'], before=before,
        before_after_regression=before_after_regression)
    grast.mapcalc(exp=regression_expr, env=env)

    change_expr = ("{change} = if({slope} < {filter_slope_threshold} && "
                   "{before_after_regression} - {after} > {min_z_diff}, "
                   "{before_after_regression} - {after}, null())").format(
        change=change, slope=slope,
        filter_slope_threshold=filter_slope_threshold,
        before_after_regression=before_after_regression, after=after,
        min_z_diff=height_threshold)
    grast.mapcalc(exp=change_expr, env=env)

    gcore.run_command('g.remove', type='raster',
                      name=['slope_tmp_get_change',
                            'before_after_regression_tmp'],
                      flags='f', env=env)
def writeArea(self, coords, rasterName):
    """Rasterize the polygon *coords* into raster *rasterName*.

    Writes the polygon to a temporary r.in.poly file, rasterizes it,
    vectorizes that raster and burns value 1 into *rasterName*.
    Returns a MaskedArea built from the temporary vector's region.
    """
    # mode='w': NamedTemporaryFile defaults to binary mode, which rejects
    # str data on Python 3; text mode is safe on Python 2 as well
    polyfile = tempfile.NamedTemporaryFile(mode='w', delete=False)
    polyfile.write("AREA\n")
    for coor in coords:
        east, north = coor
        point = " %s %s\n" % (east, north)
        polyfile.write(point)
    catbuf = "=%d a\n" % self.catId
    polyfile.write(catbuf)
    self.catId = self.catId + 1
    polyfile.close()
    region_settings = grass.parse_command('g.region', flags='p',
                                          delimiter=':')
    pname = polyfile.name.split('/')[-1]
    tmpraster = "rast_" + pname
    tmpvector = "vect_" + pname
    wx.BeginBusyCursor()
    wx.Yield()
    RunCommand('r.in.poly', input=polyfile.name, output=tmpraster,
               rows=region_settings['rows'], overwrite=True)
    RunCommand('r.to.vect', input=tmpraster, output=tmpvector,
               type='area', overwrite=True)
    RunCommand('v.to.rast', input=tmpvector, output=rasterName, value=1,
               use='val')
    wx.EndBusyCursor()
    grass.use_temp_region()
    grass.run_command('g.region', vector=tmpvector)
    region = grass.region()
    marea = MaskedArea(region, rasterName)
    RunCommand('g.remove', flags='f', type='raster', name=tmpraster)
    RunCommand('g.remove', flags='f', type='vector', name=tmpvector)
    os.unlink(polyfile.name)
    return marea
# take directory from command line input and change path
# BUGFIX: validate sys.argv *before* indexing it, and compare against 2
# (argv[0] is the script name, so the original "len(sys.argv) < 1" check
# could never trigger, and argv[1] was read before the check anyway)
if len(sys.argv) < 2:
    print("You should provide the directory path as a parameter")
    sys.exit(1)
path = sys.argv[1]
# now change the path
os.chdir(path)

grass.run_command('g.region', rast='DEM20m')
files = ['2001_NC_PhaseI_raw.txt', '2004_PreIvan_raw.txt',
         '2010_USACE_SE_raw.txt', '2011_post_irene_NC_raw.txt']
resolutions = [1, 2, 5, 10]  # resolution .5 was too large for memory
report = 'date\tres\tn\trange\n'
for f in files:
    report += f + '\n'
    for res in resolutions:
        report += '\t' + str(res) + '\t'
        # set the resolution
        grass.run_command('g.region', res=res)
        # get the per cell count
        grass.run_command('r.in.xyz', input=f, output='FM_stats_n',
                          separator=',', skip=1, method='n', overwrite=True)
        grass.run_command('r.null', map='FM_stats_n', setnull=0)
        # get the mean per cell count
        stats = grass.parse_command('r.univar', flags='eg', map='FM_stats_n')
        report += str(stats['mean']) + '\t'
        # get the per cell range
        grass.run_command('r.in.xyz', input=f, output='FM_stats_range',
                          separator=',', skip=1, method='range',
                          overwrite=True)
        grass.run_command('r.mapcalc',
                          expression='FM_stats_range_c=if(isnull(FM_stats_n), null(), FM_stats_range)',
                          overwrite=True)
        # get the mean per cell range
        stats = grass.parse_command('r.univar', flags='eg',
                                    map='FM_stats_range_c')
        report += str(stats['mean']) + '\n'
print(report)
def check_monitor_file(monitor, ftype='env'):
    """Return the path of the monitor's file of the given type.

    Calls fatal (which does not return) when d.mon reports no such file
    or the reported path does not exist.
    """
    mfile = parse_command('d.mon', flags='g').get(ftype, None)
    if mfile is None or not os.path.isfile(mfile):
        # BUGFIX: the original referenced an undefined name 'var' here,
        # raising NameError instead of the intended fatal message
        fatal(_("Unable to get monitor info (no %s found)") % ftype)
    return mfile
# Then mapcalc with the Mississippi drainage basins to get the meteoric contribution to the Mississippi # And sum to get the time series to go along with the ice melt one Mississippi = [] Meteoric = [] for i in range(len(age)): Mississippi.append("Mississippi_" + age[i]) Meteoric.append("Meteoric_" + age[i]) # wb[i] = [mm/s]; cellsize_meters2 = [m**2]; Mississippi[i] = [-]; / 1000 turns [mm] --> [m] --> so [m**3/s] for i in range(len(age)): mapcalc(Meteoric[i] + ' = ' + Mississippi[i] + ' * ' + wb[i] + ' * cellsize_meters2 / 1000' ) Qmeteoric = [] for i in range(len(age)): Qmeteoric.append(float(grass.parse_command('r.sum', rast=Meteoric[i])['SUM'])) # Output with ages # Ages as ints age_a = [] extra_zeros = '00' for i in range(len(wb)): age_ka = age[i][:2] age_ca = age[i][-2] age_a_str = age_ka + age_ca + extra_zeros # string age_a.append(int(age_a_str)) mean_age_int = [] mean_age = []
def main():
    """Compute a tiling of the source projection covering the current
    (destination) region and print one tile per line.

    Output format depends on flags: -w (WMS-style bbox query string),
    -g (shell-style key=value) or plain separator-delimited fields.
    Relies on module-level helpers (bboxToPoints, projectPoints,
    pointsToBbox, sideLengths, bboxesIntersect) and the parsed
    options/flags dictionaries.
    """
    # Take into account those extra pixels we'll be a addin'
    max_cols = int(options['maxcols']) - int(options['overlap'])
    max_rows = int(options['maxrows']) - int(options['overlap'])
    if max_cols == 0:
        gcore.fatal(_("It is not possibile to set 'maxcols=%s' and "
                      "'overlap=%s'. Please set maxcols>overlap" %
                      (options['maxcols'], options['overlap'])))
    elif max_rows == 0:
        gcore.fatal(_("It is not possibile to set 'maxrows=%s' and "
                      "'overlap=%s'. Please set maxrows>overlap" %
                      (options['maxrows'], options['overlap'])))
    # destination projection
    if not options['destproj']:
        dest_proj = gcore.read_command('g.proj', quiet=True,
                                       flags='jf').rstrip('\n')
        if not dest_proj:
            gcore.fatal(_('g.proj failed'))
    else:
        dest_proj = options['destproj']
    gcore.debug("Getting destination projection -> '%s'" % dest_proj)

    # projection scale
    if not options['destscale']:
        ret = gcore.parse_command('g.proj', quiet=True, flags='j')
        if not ret:
            gcore.fatal(_('g.proj failed'))
        if '+to_meter' in ret:
            dest_scale = ret['+to_meter'].strip()
        else:
            gcore.warning(
                _("Scale (%s) not found, assuming '1'") % '+to_meter')
            dest_scale = '1'
    else:
        dest_scale = options['destscale']
    gcore.debug('Getting destination projection scale -> %s' % dest_scale)

    # set up the projections
    srs_source = {'proj': options['sourceproj'],
                  'scale': float(options['sourcescale'])}
    srs_dest = {'proj': dest_proj, 'scale': float(dest_scale)}

    if options['region']:
        gcore.run_command('g.region', quiet=True, region=options['region'])
    dest_bbox = gcore.region()
    gcore.debug('Getting destination region')

    # output field separator
    fs = separator(options['separator'])

    # project the destination region into the source:
    gcore.verbose('Projecting destination region into source...')
    dest_bbox_points = bboxToPoints(dest_bbox)

    dest_bbox_source_points, errors_dest = projectPoints(dest_bbox_points,
                                                         source=srs_dest,
                                                         dest=srs_source)

    if len(dest_bbox_source_points) == 0:
        gcore.fatal(_("There are no tiles available. Probably the output "
                      "projection system it is not compatible with the "
                      "projection of the current location"))

    source_bbox = pointsToBbox(dest_bbox_source_points)

    gcore.verbose('Projecting source bounding box into destination...')

    source_bbox_points = bboxToPoints(source_bbox)

    source_bbox_dest_points, errors_source = projectPoints(
        source_bbox_points, source=srs_source, dest=srs_dest)

    # convert lengths to cell counts using the destination resolution
    x_metric = 1 / dest_bbox['ewres']
    y_metric = 1 / dest_bbox['nsres']

    gcore.verbose('Computing length of sides of source bounding box...')

    source_bbox_dest_lengths = sideLengths(source_bbox_dest_points,
                                           x_metric, y_metric)

    # Find the skewedness of the two directions.
    # Define it to be greater than one
    # In the direction (x or y) in which the world is least skewed (ie north south in lat long)
    # Divide the world into strips. These strips are as big as possible contrained by max_
    # In the other direction do the same thing.
    # Theres some recomputation of the size of the world that's got to come in
    # here somewhere.
    # For now, however, we are going to go ahead and request more data than is necessary.
    # For small regions far from the critical areas of projections this makes very little difference
    # in the amount of data gotten.
    # We can make this efficient for big regions or regions near critical
    # points later.

    bigger = []
    bigger.append(max(source_bbox_dest_lengths['x']))
    bigger.append(max(source_bbox_dest_lengths['y']))
    maxdim = (max_cols, max_rows)

    # Compute the number and size of tiles to use in each direction
    # I'm making fairly even sized tiles
    # They differer from each other in height and width only by one cell
    # I'm going to make the numbers all simpler and add this extra cell to
    # every tile.
    gcore.message(_('Computing tiling...'))
    tiles = [-1, -1]
    tile_base_size = [-1, -1]
    tiles_extra_1 = [-1, -1]
    tile_size = [-1, -1]
    tileset_size = [-1, -1]
    tile_size_overlap = [-1, -1]
    for i in range(len(bigger)):
        # make these into integers.
        # round up
        bigger[i] = int(bigger[i] + 1)
        tiles[i] = int((bigger[i] / maxdim[i]) + 1)
        tile_size[i] = tile_base_size[i] = int(bigger[i] / tiles[i])
        tiles_extra_1[i] = int(bigger[i] % tiles[i])
        # This is adding the extra pixel (remainder) to all of the tiles:
        if tiles_extra_1[i] > 0:
            tile_size[i] = tile_base_size[i] + 1
        tileset_size[i] = int(tile_size[i] * tiles[i])
        # Add overlap to tiles (doesn't effect tileset_size
        tile_size_overlap[i] = tile_size[i] + int(options['overlap'])

    gcore.verbose("There will be %d by %d tiles each %d by %d cells" %
                  (tiles[0], tiles[1], tile_size[0], tile_size[1]))

    ximax = tiles[0]
    yimax = tiles[1]

    min_x = source_bbox['w']
    min_y = source_bbox['s']
    max_x = source_bbox['e']
    max_y = source_bbox['n']
    span_x = (max_x - min_x)
    span_y = (max_y - min_y)

    xi = 0
    tile_bbox = {'w': -1, 's': -1, 'e': -1, 'n': -1}

    if errors_dest > 0:
        gcore.warning(_("During computation %i tiles could not be created" %
                        errors_dest))

    # walk the tile grid; emit each tile that intersects the destination
    while xi < ximax:
        tile_bbox['w'] = float(
            min_x) + (float(xi) * float(tile_size[0]) /
                      float(tileset_size[0])) * float(span_x)
        tile_bbox['e'] = float(min_x) + (
            float(xi + 1) * float(tile_size_overlap[0]) /
            float(tileset_size[0])) * float(span_x)
        yi = 0
        while yi < yimax:
            tile_bbox['s'] = float(
                min_y) + (float(yi) * float(tile_size[1]) /
                          float(tileset_size[1])) * float(span_y)
            tile_bbox['n'] = float(min_y) + (
                float(yi + 1) * float(tile_size_overlap[1]) /
                float(tileset_size[1])) * float(span_y)
            tile_bbox_points = bboxToPoints(tile_bbox)
            tile_dest_bbox_points, errors = projectPoints(tile_bbox_points,
                                                          source=srs_source,
                                                          dest=srs_dest)
            tile_dest_bbox = pointsToBbox(tile_dest_bbox_points)
            if bboxesIntersect(tile_dest_bbox, dest_bbox):
                if flags['w']:
                    print("bbox=%s,%s,%s,%s&width=%s&height=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                elif flags['g']:
                    print("w=%s;s=%s;e=%s;n=%s;cols=%s;rows=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                else:
                    print("%s%s%s%s%s%s%s%s%s%s%s" %
                          (tile_bbox['w'], fs, tile_bbox['s'], fs,
                           tile_bbox['e'], fs, tile_bbox['n'], fs,
                           tile_size_overlap[0], fs, tile_size_overlap[1]))
            yi += 1
        xi += 1
# NOTE(review): orphaned statement -- the function enclosing this
# r3.borehole call (which defined voxel, new, coords_list, size, offset,
# axes, unit and env) is not visible in this chunk; left as found.
gcore.run_command('r3.borehole', overwrite=True, input=voxel, output=new,
                  coordinates=','.join(coords_list), size=size,
                  offset_size=offset, axes=axes, unit=unit, env=env)


def classify_colors(new, group, compactness=2, threshold=0.3, minsize=10,
                    useSuperPixels=True, env=None):
    """Segment the imagery group, classify it with i.smap and write the
    per-segment modal class into *new*.

    Python 2 variant (print statements, old except syntax) of the
    classify_colors defined earlier in this chunk.
    """
    segment = 'tmp_segment'
    segment_clump = 'tmp_segment_clump'
    # we expect this name of signature
    signature = 'signature'
    classification = 'tmp_classification'
    filtered_classification = 'tmp_filtered_classification'
    reject = 'tmp_reject'
    if useSuperPixels:
        try:
            gcore.run_command('i.superpixels.slic', input=group,
                              output=segment, compactness=compactness,
                              minsize=minsize, env=env)
        # Python 2 'except X, e' syntax -- this file mixes Py2 and Py3 code
        except CalledModuleError, e:
            print 'i.superpixels.slic failed'
            print e
    else:
        gcore.run_command('i.segment', group=group, output=segment,
                          threshold=threshold, minsize=minsize, env=env)
        gcore.run_command('r.clump', input=segment, output=segment_clump,
                          env=env)
    gcore.run_command('i.smap', group=group, subgroup=group,
                      signaturefile=signature, output=classification,
                      goodness=reject, env=env)
    # keep only cells whose goodness value is below the 90th percentile
    percentile = float(gcore.parse_command('r.univar', flags='ge',
                                           map=reject,
                                           env=env)['percentile_90'])
    grast.mapcalc('{new} = if({classif} < {thres}, {classif}, null())'.format(
        new=filtered_classification, classif=classification,
        thres=percentile), env=env)
    segments = segment if useSuperPixels else segment_clump
    gcore.run_command('r.mode', base=segments, cover=filtered_classification,
                      output=new, env=env)