def main():
    import os

    # Allow repeated runs without having to delete previously created maps.
    os.environ['GRASS_OVERWRITE'] = '1'

    elevation = 'elev_lid792_1m'
    elev_resampled = 'elev_resampled'
    # Resample to a resolution similar to a Tangible Landscape scan.
    gs.run_command('g.region', raster=elevation, res=4, flags='a')
    gs.run_command('r.resamp.stats', input=elevation, output=elev_resampled)

    # this will run all 3 examples (slope, contours, points)
    # run_slope(scanned_elev=elev_resampled, env=None)
    # run_contours(scanned_elev=elev_resampled, env=None)

    # Create the two input points from inline ASCII coordinates.
    points = 'points'
    gs.write_command(
        'v.in.ascii',
        flags='t',
        input='-',
        output=points,
        separator='comma',
        stdin='638432,220382\n638621,220607',
    )
    # run_function_with_points(scanned_elev=elev_resampled, env=None, points=points)
    run_example(scanned_elev=elev_resampled, env=None)
    run_curvatures(scanned_elev=elev_resampled, env=None)
def main():
    import os

    # Work on a copy of the process environment so the change is local.
    env = os.environ.copy()
    # Allow repeated runs without having to delete previously created maps.
    env["GRASS_OVERWRITE"] = "1"

    elevation = "elev_lid792_1m"
    elev_resampled = "elev_resampled"
    # Resample to a resolution similar to a Tangible Landscape scan.
    gs.run_command("g.region", raster=elevation, res=4, flags="a", env=env)
    gs.run_command("r.resamp.stats", input=elevation, output=elev_resampled, env=env)

    # Create the two input points from inline ASCII coordinates.
    points = "points"
    gs.write_command(
        "v.in.ascii",
        flags="t",
        input="-",
        output=points,
        separator="comma",
        stdin="638432,220382\n638621,220607",
        env=env,
    )
    run_LCP(scanned_elev=elev_resampled, env=env, points=points)
def make_image(output_dir, table, grad, height, width):
    """Render a PNG preview of one color table.

    Applies color table *table* to the gradient raster *grad*, draws it into
    ``<output_dir>/colortables/<table>.png`` and frames it with a white/black
    double border via d.graph.
    """
    outfile = os.path.join(output_dir, "colortables", "%s.png" % table)
    os.environ['GRASS_RENDER_FILE'] = outfile
    grass.run_command("r.colors", map=grad, color=table, quiet=True)
    # Inset frame leaves a 2-pixel margin left/right for the border.
    os.environ['GRASS_RENDER_FRAME'] = '%f,%f,%f,%f' % (0, height, 2, width - 2)
    grass.run_command("d.rast", map=grad, quiet=True)
    # Draw the border over the full frame (the original wrapped this in a
    # pointless `if 1:`, removed here — behavior unchanged).
    os.environ['GRASS_RENDER_FRAME'] = '%f,%f,%f,%f' % (0, height, 0, width)
    grass.write_command("d.graph", quiet=True, flags='m', stdin="""
    width 1
    color {outcolor}
    polyline
    {x1} {y1}
    {x2} {y1}
    {x2} {y2}
    {x1} {y2}
    {x1} {y1}
    color {incolor}
    polyline
    {x3} {y3}
    {x4} {y3}
    {x4} {y4}
    {x3} {y4}
    {x3} {y3}
    """.format(x1=1, x2=width, y1=0, y2=height - 1,
               x3=2, x4=width - 1, y3=1, y4=height - 2,
               outcolor='white', incolor='black'))
def _getSoillossbare(self, lsfactor, kfactor, rfactor, soillossbare):
    """!Calculate soil loss on bare soil.

    A = R * K * LS

    A   potential soil loss t/(ha*a) for bare soil
    LS  LS-factor
    R   rain erosivity factor
    K   soil erodibility factor
    @return name of the soil-loss raster
    """
    # NOTE(review): `quiet` is a free name here — presumably a module-level
    # flag; confirm it is defined at import time.
    expression = "$soillossbare = $lsfactor * $kfactor * $rfactor"
    g.mapcalc(expression,
              soillossbare=soillossbare,
              lsfactor=lsfactor,
              kfactor=kfactor,
              rfactor=rfactor,
              quiet=quiet)

    # Fixed color ramp for soil-loss classes (t/(ha*a) thresholds).
    color_rules = '\n '.join([
        "0.0000 37:114:0",
        "20.0000 88:169:1",
        "30.0000 207:229:3",
        "40.0000 254:254:0",
        "55.0000 240:60:1",
        "100.0000 254:0:2",
        "150.0000 169:0:1",
        "250.0000 115:0:0",
        "500.0000 87:0:0",
        "50000.0000 87:0:0",
    ])
    g.write_command("r.colors", map=soillossbare, rules='-',
                    stdin=color_rules, quiet=quiet)
    return soillossbare
def run_water(scanned_elev, env, **kwargs):
    """Run the SIMWE overland-flow simulation and detect ponds."""
    # simwe: shallow water depth after 5 iterations of 300 mm/hr rain
    analyses.simwe(scanned_elev=scanned_elev, depth="depth",
                   rain_value=300, niterations=5, env=env)
    depth_ramp = "0.001 0:128:0\n0.05 0:255:255\n0.1 0:127:255\n0.5 0:0:255\n10 0:0:0"
    gs.write_command("r.colors", map="depth", rules="-",
                     stdin=depth_ramp, env=env)
    # ponds: depression filling may fail on a bad scan — bail out quietly
    try:
        analyses.depression(scanned_elev=scanned_elev, new="ponds",
                            repeat=3, filter_depth=0, env=env)
    except CalledModuleError:
        return
def run_flow(real_elev, scanned_elev, eventHandler, env, **kwargs):
    """Simulate water flow over the scanned surface and archive the scan."""
    # partial derivatives needed by r.sim.water
    gscript.run_command('r.slope.aspect', elevation=scanned_elev,
                        dx='dx', dy='dy', env=env)
    gscript.run_command('r.sim.water', elevation=scanned_elev,
                        dx='dx', dy='dy', rain_value=300,
                        depth='flow_flow', niterations=6, env=env)
    gscript.write_command(
        'r.colors', map='flow_flow', rules='-',
        stdin='0.001 0:128:0\n0.05 0:255:255\n0.1 0:127:255\n0.5 0:0:255\n10 0:0:0',
        env=env)
    # keep a timestamped copy of the scan
    stamp = datetime.now().strftime('%H_%M_%S')
    name_prefix = 'flow'
    gscript.run_command(
        'g.copy',
        raster=[scanned_elev, '{}_scan_{}'.format(name_prefix, stamp)],
        env=env)
def lines_sl(vector, res, dem, stoss_dradd, lee_dradd):
    """Classify segments of the original reach file into stoss and lee faces.

    Classification compares the DEM height of the start and end point of each
    segment. For calculation purposes the reach is extended by half a
    resolution length, so the first segment (outlet of the reach) must not be
    considered.

    :param vector: input reach vector map
    :param res: working resolution (segment length base)
    :param dem: elevation raster sampled via v.drape
    :param stoss_dradd: output vector of stoss-face segments
    :param lee_dradd: output vector of lee-face segments
    :return: (lastcat_endZ, firstcat_endZ) end elevations of last/first segment
    """
    grass.run_command('g.remove', vect='preso_punto,presoo,preso_ok,vector_pezzi,vector_pezzi_add,vector_pezzi_del,vector_drape_pezzi,vector_lee,vector_stoss,tratti_leep,tratti_stossp,tratti_lee_del,lee_dradd')
    # Read the start-point record once instead of invoking `v.to.db -p` twice.
    start_record = re.split('\n', grass.read_command('v.to.db', flags='p', map=vector, option='start', column='Es,Ns'))[1]
    newE = float(start_record.split('|')[1])
    newN = float(start_record.split('|')[2])
    grass.write_command('v.in.ascii', output='preso_punto', stdin=str(newE) + '|' + str(newN))
    # Connect the start point to the network, then drop the auxiliary features.
    grass.run_command('v.net', input=vector, points='preso_punto', output='presoo', operation='connect', thresh=res)
    grass.run_command('v.edit', map='presoo', layer='1', type='line', tool='delete', thresh='-1,0,0', ids='1', snap='no')
    grass.run_command('v.edit', map='presoo', layer='1', type='point', tool='delete', thresh='-1,0,0', ids='1-999', snap='no')
    grass.run_command('v.build.polylines', input='presoo', output='preso_ok')
    # Split the reach into segments slightly shorter than the resolution.
    grass.run_command('v.split', input='preso_ok', output='vector_pezzi', length=0.999 * float(res))
    grass.run_command('v.category', input='vector_pezzi', output='vector_pezzi_del', option='del', type='line', cat='1', step='1')
    grass.run_command('v.category', input='vector_pezzi_del', output='vector_pezzi_add', option='add', type='line', cat='1', step='1')
    # Sample elevations along the segments and store start/end coordinates.
    grass.run_command('v.drape', input='vector_pezzi_add', type='line', rast=dem, output='vector_drape_pezzi', method='nearest', scale='1.0', layer='1')
    grass.run_command('v.db.addtable', map='vector_drape_pezzi', columns='cat integer,startE double precision,startN double precision,startZ double precision,endE double precision,endN double precision,endZ double precision')
    grass.run_command('v.to.db', map='vector_drape_pezzi', type='line', layer='1', qlayer='1', option='start', units='meters', columns='startE,startN,startZ')
    grass.run_command('v.to.db', map='vector_drape_pezzi', type='line', layer='1', qlayer='1', option='end', units='meters', columns='endE,endN,endZ')
    # Query the attribute table once instead of running `db.select` twice.
    table_rows = grass.read_command('db.select', flags='c', table='vector_drape_pezzi').split('\n')
    firstcat_endZ = float(table_rows[0].split('|')[-1])
    lastcat_endZ = float(table_rows[-2].split('|')[-1])
    # Flow direction decides which inequality marks the lee face.
    if lastcat_endZ < firstcat_endZ:
        grass.run_command('v.extract', input='vector_drape_pezzi', output='vector_lee', type='line', layer='1', where="endZ<=startZ", new='-1')
        grass.run_command('v.extract', input='vector_drape_pezzi', output='vector_stoss', type='line', layer='1', where="endZ>startZ", new='-1')
    else:
        grass.run_command('v.extract', input='vector_drape_pezzi', output='vector_lee', type='line', layer='1', where="endZ>=startZ", new='-1')
        grass.run_command('v.extract', input='vector_drape_pezzi', output='vector_stoss', type='line', layer='1', where="endZ<startZ", new='-1')
    # Rebuild polylines and renumber categories for the two outputs.
    grass.run_command('v.build.polylines', input='vector_lee', output='tratti_leep', cats='no')
    grass.run_command('v.build.polylines', input='vector_stoss', output='tratti_stossp', cats='no')
    grass.run_command('v.category', input='tratti_stossp', output='tratti_stoss_del', option='del', type='line', layer='1', cat='1', step='1')
    grass.run_command('v.category', input='tratti_stoss_del', output=stoss_dradd, option='add', type='line', layer='1', cat='1', step='1')
    grass.run_command('v.category', input='tratti_leep', output='tratti_lee_del', option='del', type='line', layer='1', cat='1', step='1')
    grass.run_command('v.category', input='tratti_lee_del', output=lee_dradd, option='add', type='line', layer='1', cat='1', step='1')
    return lastcat_endZ, firstcat_endZ
def main():
    import os

    # Allow repeated runs without having to delete previously created maps.
    os.environ['GRASS_OVERWRITE'] = '1'
    elevation = 'elev_lid792_1m'
    elev_resampled = 'elev_resampled'
    # Resample to a resolution similar to a Tangible Landscape scan.
    gs.run_command('g.region', raster=elevation, res=4, flags='a')
    gs.run_command('r.resamp.stats', input=elevation, output=elev_resampled)

    # First LCP run with two inline points.
    points = 'points'
    gs.write_command('v.in.ascii', flags='t', input='-', output=points,
                     separator='comma',
                     stdin='638432,220382\n638621,220607')
    print("Hello!")
    LCP(scanned_elev=elev_resampled, env=None, points=points)

    # Second, identical LCP run (kept as in the original — presumably a demo
    # of re-running the analysis).
    points = 'points'
    gs.write_command('v.in.ascii', flags='t', input='-', output=points,
                     separator='comma',
                     stdin='638432,220382\n638621,220607')
    print("Hiya!")
    LCP(scanned_elev=elev_resampled, env=None, points=points)
def create_heatmaps(vectors, background_ortho, radius, width, height):
    """Render kernel-density heatmaps of each vector over an orthophoto.

    For every input vector a v.kernel density raster is computed, rendered
    twice (density alone and legend over the orthophoto), then the two PNGs
    are alpha-blended into ``heatmap_<vector>.png``.
    """
    os.environ['GRASS_FONT'] = '/usr/share/fonts/truetype/freefont/FreeSansBold.ttf'

    kernel_names = []
    for vector in vectors:
        gscript.run_command('v.kernel', input=vector, output=vector + '_kernel',
                            radius=radius, overwrite=True, quiet=True)
        kernel_names.append(vector + '_kernel')
    gscript.write_command('r.colors', map=kernel_names, rules='-',
                          stdin='0% white\n10% yellow\n40% red\n100% magenta')
    maxdens = float(gscript.parse_command('r.univar', map=kernel_names, flags='g')['max'])

    for vector in vectors:
        fg_png = 'foreground_' + vector + '_kernel' + '.png'
        bg_png = 'background_' + vector + '_kernel' + '.png'
        # density layer alone
        gscript.run_command('d.mon', start='cairo', output=fg_png,
                            width=width, height=height, overwrite=True)
        gscript.run_command('d.rast', map=vector + '_kernel')
        gscript.run_command('d.mon', stop='cairo')
        # background orthophoto with legend
        gscript.run_command('d.mon', start='cairo', output=bg_png,
                            width=width, height=height, overwrite=True)
        gscript.run_command('d.rast', map=background_ortho)
        gscript.run_command('d.legend', flags='t', raster=vector + '_kernel',
                            label_step=0.5, digits=1, range=[0, maxdens],
                            at=[3, 40, 3, 6], color='white')
        gscript.run_command('d.mon', stop='cairo')
        # blend: brighter (whiter) density pixels become more transparent
        foreground = Image.open(fg_png)
        background = Image.open(bg_png)
        foreground = foreground.convert("RGBA")
        blended = []
        for r, g, b, *_ in foreground.getdata():
            intensity = r + g + b
            blended.append((r, g, b, min(765 - intensity, 200)))
        foreground.putdata(blended)
        background.paste(foreground, (0, 0), foreground)
        background.save('heatmap_{v}.png'.format(v=vector), "PNG")
        gscript.try_remove(fg_png)
        gscript.try_remove(bg_png)
def run_flow(real_elev, scanned_elev, scanned_calib_elev, env, **kwargs):
    """Detect a user-placed outlet on the scan and delineate its basin.

    Uses change detection between the calibration scan and the current scan to
    find a marker point, snaps it to the stream network, derives the watershed
    basin and a drain line, and finally refreshes the calibration scan.
    """
    threshold = 30
    flowacc = 'flowacc'
    drain = 'drainage'
    stream = 'stream'
    basin = 'basin'
    env2 = get_environment(raster=scanned_calib_elev)
    change_detection(before=scanned_calib_elev, after=scanned_elev,
                     change='change', height_threshold=[150, 1000],
                     cells_threshold=[20, 140], add=True, max_detected=10,
                     debug=True, env=env)
    # detected points
    points = gscript.read_command('v.out.ascii', input='change', type='point',
                                  format='point').strip().splitlines()
    if points:
        x, y, cat = points[0].split('|')
        gscript.run_command('r.stream.snap', input='change', output='outlet',
                            stream_rast=stream, accumulation=flowacc,
                            radius=10, env=env2)
        outlets = gscript.read_command('v.out.ascii', input='outlet',
                                       type='point', format='point').strip().splitlines()
        x2, y2, cat2 = outlets[0].split('|')
        gscript.run_command('r.water.outlet', input=drain, output=basin,
                            coordinates=(x2, y2), env=env2)
        gscript.write_command('r.colors', map=basin, rules='-',
                              stdin='0% indigo\n100% indigo', env=env2)
        # drain line from the snapped outlet
        gscript.run_command('r.drain', input="hydrodem", output="drain",
                            drain="drain", start_coordinates=(x2, y2), env=env2)
    else:
        gscript.mapcalc('basin = null()', env=env)
        gscript.run_command('v.edit', tool='create', map='drain', env=env)
    gscript.run_command('r.watershed', elevation=scanned_elev,
                        accumulation=flowacc, stream=stream, drainage=drain,
                        threshold=threshold, env=env)
    gscript.run_command('r.hydrodem', input=scanned_elev, output="hydrodem",
                        mod=30, env=env)
    # Fixed inconsistent module alias: original used `gs.run_command` here
    # while the rest of the function uses `gscript` (likely NameError).
    gscript.run_command("g.copy", raster=[scanned_elev, scanned_calib_elev], env=env)
def main():
    """Rename an attribute column of a vector map (v.db.renamecolumn logic)."""
    map = options['map']
    layer = options['layer']
    column = options['column']
    mapset = grass.gisenv()['MAPSET']

    if not grass.find_file(map, element='vector', mapset=mapset):
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    f = grass.vector_layer_db(map, layer)
    table = f['table']
    keycol = f['key']
    database = f['database']
    driver = f['driver']

    if not table:
        grass.fatal(_("There is no table connected to the input vector map. Cannot rename any column"))

    cols = column.split(',')
    # Robustness fix: require exactly "oldname,newname" instead of failing
    # with a bare IndexError on malformed input.
    if len(cols) != 2:
        grass.fatal(_("The column parameter must contain the old and the new name separated by a comma"))
    oldcol = cols[0]
    newcol = cols[1]

    if driver == "dbf":
        if len(newcol) > 10:
            grass.fatal(_("Column name <%s> too long. The DBF driver supports column names not longer than 10 characters") % newcol)

    if oldcol == keycol:
        grass.fatal(_("Cannot rename column <%s> as it is needed to keep table <%s> connected to the input vector map") % (oldcol, table))

    # describe old col
    oldcoltype = None
    for col_descr in grass.db_describe(table)['cols']:
        if col_descr[0] != oldcol:
            continue
        oldcoltype = col_descr[1]
        oldcollength = col_descr[2]

    # old col there?
    if not oldcoltype:
        grass.fatal(_("Column <%s> not found in table <%s>") % (oldcol, table))

    if driver in ['sqlite', 'dbf']:
        # sqlite/dbf lack ALTER ... RENAME: add new column, copy, drop old.
        if oldcoltype.upper() == "CHARACTER":
            colspec = "%s varchar(%s)" % (newcol, oldcollength)
        else:
            colspec = "%s %s" % (newcol, oldcoltype)
        grass.run_command('v.db.addcolumn', map=map, layer=layer, column=colspec)
        sql = "UPDATE %s SET %s=%s" % (table, newcol, oldcol)
        grass.write_command('db.execute', input='-', database=database,
                            driver=driver, stdin=sql)
        grass.run_command('v.db.dropcolumn', map=map, layer=layer, column=oldcol)
    else:
        sql = "ALTER TABLE %s RENAME %s TO %s" % (table, oldcol, newcol)
        grass.write_command('db.execute', input='-', database=database,
                            driver=driver, stdin=sql)

    # write cmd history:
    grass.vector_history(map)
def main(options, flags):
    """Convert a Matplotlib color table to GRASS color rules.

    Reads the named Matplotlib colormap (or a Python file defining
    ``test_cm``), converts it to r.colors rules and applies/writes/prints them
    according to the ``map``/``output`` options.
    """
    name = options['color']
    n_colors = int(options['ncolors'])
    discrete = flags['d']
    if flags['n']:
        name += '_r'

    if os.path.isfile(name):
        # The file is expected to define a colormap object named `test_cm`.
        ns = {'__name__': '',
              '__file__': os.path.basename(name),
              }
        with open(name) as f:
            code = compile(f.read(), os.path.basename(name), 'exec')
            exec(code, globals(), ns)
        cmap = ns.get("test_cm", None)
        # Robustness fix: without this guard, a file lacking `test_cm` would
        # crash below with AttributeError on `cmap.N`.
        if cmap is None:
            gscript.fatal(_("File <{n}> does not define a colormap object"
                            " named 'test_cm'").format(n=name))
        # we ignore user input since we need to use whatever the
        # color map object is defined with
        n_colors = cmap.N
    else:
        # not sure if datad is part of the API but it is in one example
        # datad might be potentially better way of getting the table
        # it contains the raw data, but on the other hand it might not be
        # clear if you can interpolate linearly in between (but likely yes)
        if hasattr(cm, 'datad') and name not in cm.datad.keys():
            import matplotlib as mpl
            gscript.fatal(_("Matplotlib {v} does not contain color table"
                            " <{n}>").format(v=mpl.__version__, n=name))
        cmap = cm.get_cmap(name, lut=n_colors)

    comments = []
    comments.append(
        "Generated from Matplotlib color table <{}>".format(name))
    comments.append(
        "using:")
    command = [sys.argv[0].split(os.path.sep)[-1]]
    command.extend(sys.argv[1:])
    comments.append(
        "  {}".format(' '.join(command)))

    rules = mpl_cmap_to_rules(cmap, n_colors=n_colors,
                              discrete=discrete, comments=comments)

    if options['map']:
        rcf = ''
        for char in 'gae':
            if flags[char]:
                rcf += char
        gscript.write_command('r.colors', map=options['map'], flags=rcf,
                              rules='-', stdin=rules,)
    if options['output']:
        with open(options['output'], 'w') as f:
            f.write(rules)
            f.write('\n')
    elif not options['map']:
        print(rules)
def run_cutfill(real_elev, scanned_elev, env, **kwargs):
    """Visualize cut-and-fill between the real DEM and the scanned surface."""
    # Contours of the real DEM are static — no need to compute them on the fly.
    dem_env = get_environment(raster=real_elev)
    gs.run_command(
        "r.contour",
        input=real_elev,
        output="contours_dem",
        step=5,
        flags="t",
        env=dem_env,
    )
    # Difference raster, colored symmetrically by standard deviations.
    gs.mapcalc("diff = {r} - {s}".format(r=real_elev, s=scanned_elev), env=env)
    gs.mapcalc("absdiff = abs(diff)", env=env)
    stats = gs.parse_command("r.univar", flags="g", map="absdiff", env=env)
    one_sigma = float(stats["stddev"])
    two_sigma = 2 * one_sigma
    ceiling = float(stats["max"]) + 1
    ramp = [
        f"-{ceiling} black",
        f"-{two_sigma} 202:000:032",
        f"-{one_sigma} 244:165:130",
        "0 247:247:247",
        f"{one_sigma} 146:197:222",
        f"{two_sigma} 5:113:176",
        f"{ceiling} black",
    ]
    gs.write_command("r.colors", map="diff", rules="-", stdin="\n".join(ramp), env=env)
def preprocess(reflbandnames, panbandnames, dataset, outputraster, scene):
    """Import, calibrate and pansharpen a Landsat 8 scene.

    Links the scene's TIF bands, computes top-of-atmosphere reflectance,
    enhances colors, Brovey-pansharpens, recodes to 0-255 CELL rasters,
    groups the result and exports it as a GeoTIFF.
    """
    options, flags = grass.parser()
    pansuffix = ['red', 'green', 'blue']
    # Fixed: was a Python-2 `print os.environ` statement (syntax error in
    # Python 3, inconsistent with the print() calls below).
    print(os.environ)
    importregex = re.compile('.*[.]TIF')
    for file in os.listdir(dataset):
        if re.search(importregex, file):
            # Band number is encoded at a fixed filename position; two-digit
            # band numbers make the name one character longer.
            if len(file) == 29:
                num = file[23] + file[24]
            else:
                num = file[23]
            read2_command('r.external', input=dataset + '/' + file,
                          output='B' + num, overwrite=True, flags='e')
    # Locate the metadata (MTL) text file.
    for file in os.listdir(dataset):
        if fnmatch.fnmatch(file, '*.txt'):
            mtl = file
    metfile = os.path.join(dataset, mtl)
    read2_command('i.landsat.toar', input='B', output='B_refl',
                  metfile=metfile, sensor='oli8', overwrite=True)
    print('reflectance calculated')
    read2_command('r.colors', map=reflbandnames, flags='e', color='grey')
    print('histograms equalized')
    read2_command('i.colors.enhance', red=reflbandnames[0],
                  green=reflbandnames[1], blue=reflbandnames[2])
    print('colors enhanced')
    # pansharpen
    read2_command('i.fusion.brovey', ms3=reflbandnames[0], ms2=reflbandnames[1],
                  ms1=reflbandnames[2], pan=panbandnames[3], overwrite=True,
                  flags='l', output_prefix='brov')
    pannames = ['brov.' + s for s in pansuffix]
    pannames255 = [s + '_255' for s in pannames]
    print('pansharpening and composition achieved')
    read2_command('g.region', raster=pannames)
    read2_command('r.colors', map=pannames, flags='e', color='grey')
    for raster in pannames:
        minmax = grass.parse_command('r.info', map=raster, flags='r')
        print(minmax)
        newrast = raster + '_255'
        grass.write_command('r.recode', input=raster, output=newrast, rules='-',
                            stdin=minmax[u'min'] + ':' + minmax[u'max'] + ':0:255',
                            overwrite=True)
    print('rasters recoded to CELL type')
    # equalize colors once again
    read2_command('r.colors',
                  map=[pannames255[0], pannames255[1], pannames255[2]],
                  flags='e', color='grey')
    read2_command('i.colors.enhance', red=pannames255[0],
                  green=pannames255[1], blue=pannames255[2])
    #read2_command('r.composite', red=pannames[0], green=pannames[1], blue=pannames[2], output='comp',
    #              overwrite=True)
    # create imagery group
    read2_command('i.group', group='pangroup876', subgroup='pangroup876',
                  input=pannames255)
    print('created imagery group')
    read2_command('r.out.gdal', input='pangroup876', output=outputraster,
                  overwrite=True, format='GTiff', type='Int32', flags='f')
def point_selection(lee_dradd, m, nstep, point_out, dem):
    """Interactively select one or more sections for the prediction.

    Lets the user zoom and click on the lee-face map; for each selected
    section, records category, coordinates, average segment elevation and the
    DEM value at the clicked point.

    :return: (id_section, east_section, north_section, z_avg, z_pt)
    """
    grass.run_command('g.remove', vect=point_out)
    grass.run_command('v.in.ascii', output=point_out, flags='e')
    grass.run_command('v.db.addtable', map=point_out, columns='cat integer,lee integer,E double precision,N double precision')
    nstep = int(nstep)
    for j in range(nstep + 1):
        grass.run_command('v.db.addcol', map=point_out, columns='F1' + str(j + 1) + ' double precision,F2' + str(j + 1) + ' double precision')
    id_section = []
    east_section = []
    north_section = []
    z_avg = []
    z_pt = []
    grass.run_command('d.vect', map=lee_dradd, color='255:0:0')
    # Fixed throughout: Python-2 print statements converted to print() calls
    # (single-argument form works on both Python 2 and 3).
    print("+--------------------------------------------------------------------+")
    print(" Please zoom on the zone where you want to perform the prediction")
    print("+--------------------------------------------------------------------+")
    grass.run_command('d.zoom')
    if m:
        print("+----------------------------------------------------------------+")
        print(" Please select multiple points where to perform your prediction")
        print("+----------------------------------------------------------------+")
        output = (grass.read_command('d.what.vect', flags='xt', map=lee_dradd)).split('\n')
        for i in output:
            if re.match('category:', i):
                id_section.append(int(i.split(': ')[1]))
            elif re.match('.*\(E\).*', i):
                east_section.append(float(i.split('(E) ')[0]))
                north_section.append(float(((i.split('(E) ')[1])).split('(N')[0]))
    else:
        print("+------------------------------------------------------------+")
        print(" Please select the point where to perform your prediction")
        print("+------------------------------------------------------------+")
        output = (grass.read_command('d.what.vect', flags='1xt', map=lee_dradd)).split('\n')
        id_section = [int(i.split(': ')[1]) for i in output if re.match('category:', i)]
        east_section = [float(i.split('(E) ')[0]) for i in output if re.match('.*\(E\).*', i)]
        north_section = [float(((i.split('(E) ')[1])).split('(N')[0]) for i in output if re.match('.*\(E\).*', i)]
    # Build one point map per selection and patch it into point_out.
    n = 0
    for h in east_section:
        grass.run_command('v.in.ascii', flags='e', output='out' + str(n))
        grass.write_command('v.in.ascii', output='out' + str(n), x='1', y='2', fs='|', stdin='%s|%s' % (h, north_section[n]))
        grass.run_command('v.db.addtable', map='out' + str(n), columns='cat integer,lee integer,E double precision,N double precision')
        for j in range(nstep + 1):
            grass.run_command('v.db.addcol', map='out' + str(n), columns='F1' + str(j + 1) + ' double precision,F2' + str(j + 1) + ' double precision')
        grass.run_command('v.patch', flags='ae', input='out' + str(n), output=point_out)
        grass.run_command('v.category', input=point_out, option='del', output='point_del')
        grass.run_command('v.category', input='point_del', option='add', output=point_out)
        grass.run_command('v.db.update', map=point_out, layer='1', column='lee', value=str(id_section[n]), where='cat=' + str(n + 1))
        grass.run_command('v.to.db', map=point_out, option='cat', columns='cat')
        grass.run_command('v.to.db', map=point_out, option='coor', columns='E,N')
        n = n + 1
    grass.run_command('v.db.update', map=point_out, layer='1', column='lee', value=str(id_section[0]), where='cat=1')
    # Average of start/end elevations per section plus DEM value at the click.
    n = 0
    for i in id_section:
        Z = (float(grass.read_command('db.select', flags='c', sql='select startZ from lee_dradd where cat=' + str(i), table='lee_dradd')) + float(grass.read_command('db.select', flags='c', sql='select endZ from lee_dradd where cat=' + str(i), table='lee_dradd'))) / 2
        z_pt.append(float((grass.read_command('r.what', input=dem, east_north=str(east_section[n]) + ',' + str(north_section[n]))).split('|')[3]))
        z_avg.append(Z)
        n = n + 1
    return id_section, east_section, north_section, z_avg, z_pt
def main():
    """Update an attribute column of a vector map (v.db.update logic)."""
    vector = options['map']
    layer = options['layer']
    column = options['column']
    value = options['value']
    qcolumn = options['qcolumn']
    where = options['where']
    mapset = grass.gisenv()['MAPSET']

    # The map must live in the CURRENT mapset to be writable.
    if not grass.find_file(vector, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))

    table = f['table']
    database = f['database']
    driver = f['driver']

    # checking column types
    try:
        coltype = grass.vector_columns(vector, layer)[column]['type']
    except KeyError:
        grass.fatal(_('Column <%s> not found') % column)

    if qcolumn:
        if value:
            grass.fatal(_('<value> and <qcolumn> are mutually exclusive'))
        # special case: copy values from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_('Either <value> or <qcolumn> must be given'))
        # literal value: quote unless the target column is numeric
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    grass.verbose("SQL: \"%s\"" % cmd)
    grass.write_command('db.execute', input='-', database=database,
                        driver=driver, stdin=cmd)

    # write cmd history:
    grass.vector_history(vector)
    return 0
def adjust_futures_colors(raster):
    """Remap FUTURES output for display and apply a fixed color ramp.

    Cells valued -1 are recoded to 100; returns the name of the new raster
    (original name with a trailing underscore).
    """
    import grass.script as gs

    new_raster = raster + '_'
    info = gs.raster_info(raster)
    ramp = '0 200:200:200\n1 255:100:50\n{m} 255:255:0\n100 180:255:160'.format(m=info['max'])
    gs.mapcalc('{nr} = if({r} == -1, 100, {r})'.format(nr=new_raster, r=raster))
    gs.write_command('r.colors', map=new_raster, stdin=ramp, rules='-')
    return new_raster
def onFuturesDone(event):
    """Handler fired when the FUTURES run completes: colorize and register."""
    event.userData['update']()
    env = event.userData['env']
    # earlier palette kept for reference:
    #gscript.write_command('r.colors', map='final', rules='-', stdin='-1 104:200:104\n0 200:200:200\n1 250:100:50\n24 250:100:50', quiet=True)
    gscript.write_command('r.colors', map='final', rules='-',
                          stdin='-1 104:200:104\n0 221:201:201\n1 165:42:42\n24 165:42:42',
                          quiet=True, env=env)
    # Register all per-step rasters into the space-time dataset.
    steps = gscript.read_command('g.list', type='raster', pattern='step_*',
                                 separator=',', quiet=True, env=env).strip()
    gscript.run_command('t.register', maps=steps, input='futures_series',
                        start=2012, unit='years', increment=1,
                        overwrite=True, quiet=True, env=env)
    gscript.write_command('t.rast.colors', input='futures_series',
                          stdin='1 165:42:42', rules='-', env=env)
def main():
    """Sort the points of a vector map by an attribute column (v.sort.points).

    Exports the points with attributes, sorts them by the chosen numeric
    column and re-imports them so feature order matches attribute order.
    """
    options, flags = gscript.parser()
    inputmap = options['input']
    layer = options['layer']
    outputmap = options['output']
    sort_column = options['column']
    # default is descending; -r flag switches to ascending
    reverse = True
    if flags['r']:
        reverse = False

    columns = gscript.vector_columns(inputmap)
    key_column = gscript.vector_layer_db(inputmap, layer)['key']
    # +2 accounts for the leading x and y fields of the ASCII export
    sort_index = columns[sort_column]['index'] + 2
    sorted_cols = sorted(iter(columns.items()), key=lambda x_y: x_y[1]['index'])

    column_def = "x DOUBLE PRECISION, y DOUBLE PRECISION, cat INTEGER"
    colnames = []
    for colcount in range(1, len(sorted_cols)):
        # renamed from `name`/`type` to avoid shadowing the builtin `type`
        col_name = sorted_cols[colcount][0]
        col_type = sorted_cols[colcount][1]['type']
        if col_name == sort_column and (col_type != 'INTEGER' and col_type != 'DOUBLE PRECISION'):
            gscript.fatal('Sort column must be numeric')
        if col_name == key_column:
            continue
        colnames.append(col_name)
        column_def += ", %s %s" % (col_name, col_type)

    inpoints = gscript.read_command('v.out.ascii', in_=inputmap,
                                    columns=colnames, quiet=True)
    # parse, sort, and serialize back to pipe-separated ASCII
    points = [[num(x) for x in line.split('|')] for line in inpoints.splitlines()]
    points_sorted = sorted(points, key=lambda row: row[sort_index], reverse=reverse)
    outpoints = "".join("|".join(str(x) for x in row) + "\n" for row in points_sorted)

    gscript.write_command('v.in.ascii', input='-', stdin=outpoints,
                          output=outputmap, x=1, y=2, cat=3,
                          columns=column_def, quiet=True)
    gscript.run_command('v.db.dropcolumn', map=outputmap, columns='x,y', quiet=True)
    return 0
def main():
    """Remove the attribute table linked to a vector map layer (v.db.droptable)."""
    force = flags['f']
    map = options['map']
    table = options['table']
    layer = options['layer']

    # paranoia checks
    f = gscript.vector_layer_db(map, layer)
    if not table:
        # removing the table name connected to the selected layer
        table = f['table']
        if not table:
            gscript.fatal(_("No table assigned to layer <%s>") % layer)
    else:
        # removing a user-specified table — it must match the linked one
        existingtable = f['table']
        if existingtable != table:
            gscript.fatal(_("User selected table <%s> but the table <%s> "
                            "is linked to layer <%s>") %
                          (table, existingtable, layer))

    # we use the DB settings of the selected layer
    database = f['database']
    driver = f['driver']

    gscript.message(_("Removing table <%s> linked to layer <%s> of vector"
                      " map <%s>") % (table, layer, map))
    if not force:
        gscript.message(_("You must use the -f (force) flag to actually "
                          "remove the table. Exiting."))
        gscript.message(_("Leaving map/table unchanged."))
        sys.exit(0)

    gscript.message(_("Dropping table <%s>...") % table)
    try:
        gscript.write_command('db.execute', stdin="DROP TABLE %s" % table,
                              input='-', database=database, driver=driver)
    except CalledModuleError:
        gscript.fatal(_("An error occurred while running db.execute"))

    gscript.run_command('v.db.connect', flags='d', map=map, layer=layer)

    gscript.message(_("Current attribute table link(s):"))
    # silently test first to avoid confusing error messages
    nuldev = open(os.devnull, 'w')
    try:
        gscript.run_command('v.db.connect', flags='p', map=map, quiet=True,
                            stdout=nuldev, stderr=nuldev)
    except CalledModuleError:
        gscript.message(_("(No database links remaining)"))
    else:
        gscript.run_command('v.db.connect', flags='p', map=map)

    # write cmd history:
    gscript.vector_history(map)
def render_2d(envs):
    """Render the net-difference raster of each simulation mapset to PNG."""
    # rendering parameters
    brighten = 0  # percent brightness of shaded relief
    render_multiplier = 1  # multiplier for rendering size
    whitespace = 1.5  # extra horizontal room for the legend
    fontsize = 36 * render_multiplier  # legend font size
    legend_coord = (10, 50, 1, 4)  # legend display coordinates
    zscale = 1

    # create rendering directory
    render = os.path.join(gisdbase, location, 'rendering')
    if not os.path.exists(render):
        os.makedirs(render)

    for mapset in simulations:
        # change mapset
        gscript.read_command('g.mapset', mapset=mapset, location=location)
        info = gscript.parse_command('r.info', map='elevation', flags='g')
        # Fixed: the whitespace factor made `width` a float, but d.mon
        # requires integer pixel dimensions.
        width = int(int(info.cols) * render_multiplier * whitespace)
        height = int(info.rows) * render_multiplier

        # render net difference
        gscript.run_command('d.mon', start=driver, width=width, height=height,
                            output=os.path.join(
                                render, mapset + '_' + 'net_difference' + '.png'),
                            overwrite=1)
        gscript.write_command('r.colors', map='net_difference', rules='-',
                              stdin=difference_colors)
        gscript.run_command('r.relief', input='elevation', output='relief',
                            altitude=90, azimuth=45, zscale=zscale,
                            env=envs[mapset])
        gscript.run_command('d.shade', shade='relief', color='net_difference',
                            brighten=brighten)
        gscript.run_command('d.legend', raster='net_difference',
                            fontsize=fontsize, at=legend_coord)
        gscript.run_command('d.mon', stop=driver)

    try:
        # stop cairo monitor
        gscript.run_command('d.mon', stop=driver)
    except CalledModuleError:
        pass
def set_colors(map, v0, v1):
    """Apply a grey ramp to *map*: black up to v0, white from v1 upward."""
    ramp = "0% black\n" + "%f black\n" % v0 + "%f white\n" % v1 + "100% white\n"
    grass.write_command('r.colors', map = map, rules = '-', stdin = ramp, quiet = True)
def run_view(scanned_elev, blender_path, env, **kwargs):
    """Detect a view-direction arrow on the scan and export it to Blender.

    Compares the current scan to the saved one, classifies the raised marker
    by color into head/tail, drapes it to 3D and, if it moved far enough,
    writes it out as the vantage line for the Blender coupling.
    """
    group = 'color'
    before = 'scan_saved'
    elev_threshold = 20
    color_threshold = 150
    dist_threshold = 30
    change = 'change'
    arrow = 'arrow'
    arrow3d = 'arrow3d'
    arrow_final = 'arrow_final'

    # Read the previously exported arrow (if any) so we can compare positions.
    ff = gscript.find_file(name=arrow_final, element='vector')
    old_points = []
    if ff and ff['fullname']:
        old_p = gscript.read_command('v.out.ascii', input=ff['fullname'],
                                     format='standard', type='line',
                                     env=env).strip().splitlines()
        for op in old_p:
            line = op.strip().split()
            if line == ['1', '1']:
                continue
            try:
                x = float(line[0])
                y = float(line[1])
                z = float(line[2])
                old_points.append((x, y, z))
            except ValueError:
                continue
            except IndexError as e:
                print(line)
                print(e)
                continue

    # Regression removes the global offset between saved and current scan.
    reg_params = gscript.parse_command('r.regression.line', flags='g',
                                       mapx=before, mapy=scanned_elev, env=env)
    gscript.mapcalc(exp='{new} = if(({a} + {b} * {after}) - {before} > {thr}, 1, null())'.format(
        a=reg_params['a'], b=reg_params['b'], after=scanned_elev,
        before=before, thr=elev_threshold, new=change), env=env)
    #gscript.parse_command('r.univar', map=[group + '_r', group + '_g', group + '_b'], output=, flags='t', zones=, env=env)
    # Classify raised cells by mean RGB brightness: 1 = light, 2 = dark.
    gscript.mapcalc(exp='{new} = if({change} && ({r} + {g} + {b}) / 3. >= {th}, 1, 2)'.format(
        new=arrow, change=change, th=color_threshold,
        r=group + '_r', g=group + '_g', b=group + '_b'), env=env)
    gscript.run_command('r.volume', input=arrow, clump=arrow,
                        centroids=arrow, env=env)
    gscript.run_command('v.drape', input=arrow, output=arrow3d,
                        elevation=before, method='bilinear', env=env)
    points = gscript.read_command('v.out.ascii', input=arrow3d, env=env).strip()
    if points:
        new_points = []
        linetext = 'L 2 1\n'
        for p in points.splitlines():
            x, y, z, c = p.split('|')
            new_points.append((float(x), float(y), float(z)))
            linetext += '{} {} {}\n'.format(x, y, z)
        linetext += '1 1\n'
        # Re-export only when either endpoint moved beyond the threshold.
        if not old_points or (dist(old_points[0], new_points[0]) > dist_threshold or
                              dist(old_points[1], new_points[1]) > dist_threshold):
            print('write')
            gscript.write_command('v.in.ascii', stdin=linetext, input='-',
                                  output=arrow_final, format='standard',
                                  flags='zn', env=env)
            blender_export_vector(vector=arrow_final, name='vantage', z=True,
                                  vtype='line', path=blender_path,
                                  time_suffix=False, env=env)
def main():
    """Remove the attribute table linked to a vector map layer (v.db.droptable)."""
    force = flags['f']
    map = options['map']
    table = options['table']
    layer = options['layer']

    # do some paranoia tests as well:
    f = grass.vector_layer_db(map, layer)

    if not table:
        # Removing table name connected to selected layer
        table = f['table']
        if not table:
            grass.fatal(_("No table assigned to layer <%s>") % layer)
    else:
        # Removing user specified table
        existingtable = f['table']
        if existingtable != table:
            grass.fatal(_("User selected table <%s> but the table <%s> is linked to layer <%s>") %
                        (table, existingtable, layer))

    # we use the DB settings selected layer
    database = f['database']
    driver = f['driver']

    grass.message(_("Removing table <%s> linked to layer <%s> of vector map <%s>") %
                  (table, layer, map))
    if not force:
        grass.message(_("You must use the -f (force) flag to actually remove the table. Exiting."))
        grass.message(_("Leaving map/table unchanged."))
        sys.exit(0)

    grass.message(_("Dropping table <%s>...") % table)
    try:
        grass.write_command('db.execute', stdin="DROP TABLE %s" % table,
                            input='-', database=database, driver=driver)
    except CalledModuleError:
        grass.fatal(_("An error occurred while running db.execute"))

    grass.run_command('v.db.connect', flags='d', map=map, layer=layer)

    grass.message(_("Current attribute table link(s):"))
    # silently test first to avoid confusing error messages
    # Fixed: `file()` was removed in Python 3 — use open() (as the py3
    # variant of this script does).
    nuldev = open(os.devnull, 'w')
    try:
        grass.run_command('v.db.connect', flags='p', map=map, quiet=True,
                          stdout=nuldev, stderr=nuldev)
    except CalledModuleError:
        grass.message(_("(No database links remaining)"))
    else:
        grass.run_command('v.db.connect', flags='p', map=map)

    # write cmd history:
    grass.vector_history(map)
def set_colors(map, v0, v1):
    """Apply a grey ramp to *map*: black up to v0, linear to white at v1."""
    color_rules = "0% black\n{:f} black\n{:f} white\n100% white\n".format(v0, v1)
    gscript.write_command("r.colors", map=map, rules="-", stdin=color_rules, quiet=True)
def description_box(title):
    """Draw a white strip across the top of the display and put *title* on it."""
    background = """
color white
polygon
0 0
0 16
100 16
100 0
"""
    gscript.write_command('d.graph', stdin=background)
    gscript.run_command('d.text', text=title, at='2,11', size=5, color='black')
def tmpmask(raster, absolute_minimum):
    """Recode *raster* values up to *absolute_minimum* to 1 and return the
    name of the temporary recoded map."""
    out_name = create_temporary_name("tmprecode")
    gs.write_command(
        "r.recode",
        input=raster,
        output=out_name,
        rule="-",
        stdin="*:{}:1".format(absolute_minimum),
        quiet=True,
    )
    return out_name
def set_colors(map, v0, v1):
    """Write a grey-scale color table for *map*: black below v0, white above v1."""
    stdin_rules = '0% black\n' + '%f black\n' % v0 + '%f white\n' % v1 + '100% white\n'
    grass.write_command('r.colors', map=map, rules='-', stdin=stdin_rules, quiet=True)
def main():
    """Set a column's values in a vector map's attribute table.

    Builds and executes a single SQL UPDATE statement, either with a
    literal value (<value>) or copying from another column (<qcolumn>),
    optionally restricted by a WHERE clause.
    """
    vector = options['map']
    layer = options['layer']
    column = options['column']
    value = options['value']
    qcolumn = options['qcolumn']
    where = options['where']

    mapset = grass.gisenv()['MAPSET']

    # does map exist in CURRENT mapset?
    if not grass.find_file(vector, element = 'vector', mapset = mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))

    # DB connection parameters of the selected layer
    table = f['table']
    database = f['database']
    driver = f['driver']

    # checking column types
    try:
        coltype = grass.vector_columns(vector, layer)[column]['type']
    except KeyError:
        grass.fatal(_('Column <%s> not found') % column)

    if qcolumn:
        if value:
            grass.fatal(_('<value> and <qcolumn> are mutually exclusive'))
        # special case: we copy from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_('Either <value> or <qcolumn> must be given'))
        # we insert a value; quote it unless the target column is numeric
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    # NOTE(review): value/where are interpolated directly into the SQL
    # text (no escaping) -- inherent to this db.execute-based interface
    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    grass.verbose("SQL: \"%s\"" % cmd)
    grass.write_command('db.execute', input = '-', database = database, driver = driver, stdin = cmd)

    # write cmd history:
    grass.vector_history(vector)

    return 0
def grass_5by5(grass_xy_session, test_data_path): """Create a square, 5 by 5 domain. """ # Create new mapset gscript.run_command('g.mapset', mapset='5by5', flags='c') # Create 3by5 named region gscript.run_command('g.region', res=10, s=10, n=40, w=0, e=50, save='3by5') region = gscript.parse_command('g.region', flags='pg') assert int(region["cells"]) == 15 # Create raster for mask (do not apply mask) gscript.run_command('g.region', res=10, s=0, n=50, w=10, e=40) region = gscript.parse_command('g.region', flags='pg') assert int(region["cells"]) == 15 gscript.mapcalc('5by3=1') # Set a 5x5 region gscript.run_command('g.region', res=10, s=0, w=0, e=50, n=50) region = gscript.parse_command('g.region', flags='pg') assert int(region["cells"]) == 25 # DEM gscript.mapcalc('z=0') univar_z = gscript.parse_command('r.univar', map='z', flags='g') assert int(univar_z['min']) == 0 assert int(univar_z['max']) == 0 # Manning gscript.mapcalc('n=0.05') univar_n = gscript.parse_command('r.univar', map='n', flags='g') assert float(univar_n['min']) == 0.05 assert float(univar_n['max']) == 0.05 # Start depth gscript.write_command('v.in.ascii', input='-', stdin='25|25', output='start_h') gscript.run_command('v.to.rast', input='start_h', output='start_h', type='point', use='val', value=0.2) gscript.run_command('r.null', map='start_h', null=0) univar_start_h = gscript.parse_command('r.univar', map='start_h', flags='g') assert float(univar_start_h['min']) == 0 assert float(univar_start_h['max']) == 0.2 # Symmetry control points control_points = os.path.join(test_data_path, '5by5', 'control_points.csv') gscript.run_command('v.in.ascii', input=control_points, output='control_points', separator='comma') return None
def description_box(title):
    """Paint a white box along the top of the frame and render *title* inside it."""
    box_commands = "\n".join(
        ["", "color white", "polygon", "0 0", "0 16", "100 16", "100 0", ""]
    )
    gscript.write_command("d.graph", stdin=box_commands)
    gscript.run_command("d.text", text=title, at="2,11", size=5, color="black")
def create_end_points(env):
    """Create the 'trail_points' vector map with two points placed at 20 %
    and 80 % of the 'scan' raster extent; return ((x1, y1), (x2, y2))."""
    info = gs.raster_info("scan")
    ns_extent = info["north"] - info["south"]
    ew_extent = info["east"] - info["west"]
    y1 = info["south"] + 2 * ns_extent / 10.0
    y2 = info["south"] + 8 * ns_extent / 10.0
    x1 = info["west"] + 2 * ew_extent / 10.0
    x2 = info["west"] + 8 * ew_extent / 10.0
    coord_lines = "{x1}|{y1}\n{x2}|{y2}".format(x1=x1, x2=x2, y1=y1, y2=y2)
    gs.write_command("v.in.ascii", input="-", stdin=coord_lines, output="trail_points", env=env)
    return ((x1, y1), (x2, y2))
def compute_ies(INtmprule, INipi, INtmpf2, INenvmin, INenvmax):
    """
    Compute the environmental similarity layer for the individual variables

    :param INtmprule: r.recode rules (file) mapping raw values to percentiles
    :param INipi: name of the output similarity raster
    :param INtmpf2: input environmental variable raster
    :param INenvmin: minimum of the variable over the reference data
    :param INenvmax: maximum of the variable over the reference data
    """
    # percentile layer: recode the raw variable with the supplied rules
    tmpf3 = tmpname('tmp6')
    gs.run_command("r.recode", input=INtmpf2, output=tmpf3, rules=INtmprule)

    # Piecewise similarity score (presumably the MESS formulation --
    # TODO confirm against the reference paper):
    #   percentile == 0   -> scale below the reference minimum
    #   percentile <= 50  -> 2 * percentile
    #   percentile < 100  -> 2 * (100 - percentile)
    #   otherwise         -> scale above the reference maximum
    calcc = "{0} = if({1} == 0, (float({2}) - {3}) / ({4} - {3}) " \
            "* 100.0, if({1} <= 50, 2 * float({1}), "\
            "if({1} < 100, 2*(100-float({1})), " \
            "({4} - float({2})) / ({4} - {3}) * 100.0)))" \
            .format(INipi, tmpf3, INtmpf2, float(INenvmin), float(INenvmax))
    gs.mapcalc(calcc, quiet=True)
    # shared color table for all similarity layers
    gs.write_command("r.colors", map=INipi, rules='-', stdin=COLORS_MES, quiet=True)
def legend_item(color, text, x, y):
    """Draw a 2x2 color swatch with its lower-left corner at (x, y) and
    render *text* to the right of it."""
    right = x + 2
    top = y + 2
    swatch = "\ncolor {}\npolygon\n{} {}\n{} {}\n{} {}\n{} {}\n".format(
        color, x, top, x, y, right, y, right, top
    )
    gscript.write_command("d.graph", stdin=swatch)
    gscript.run_command("d.text", text=text, at=(x + 3, y + 0.5), size=2, color="black")
def run_example(scanned_elev, env, **kwargs):
    """Classify the slope (in percent) of the scanned DEM into six classes
    and color them with a green-yellow-red ramp."""
    gs.run_command('r.slope.aspect', elevation=scanned_elev, slope='slope', format='percent', env=env)
    # r.recode rules on stdin; each line is min:max:category,
    # e.g. slopes between 0 and 2 percent become class 1
    recode_rules = '0:2:1\n2:5:2\n5:8:3\n8:15:4\n15:30:5\n30:*:6'
    gs.write_command('r.recode', input='slope', output='slope_class', rules='-', stdin=recode_rules, env=env)
    # set new color table: green - yellow - red
    gs.run_command('r.colors', map='slope_class', color='gyr', env=env)
def main():
    """Join columns of another table into a vector map's attribute table.

    For every column of <otable> except the join column itself, the column
    is added to the map's table (when not already present) and then filled
    via an SQL UPDATE with a correlated subquery matching
    <otable>.<ocolumn> to the map table's <column>.
    """
    map = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['otable']
    ocolumn = options['ocolumn']

    f = grass.vector_layer_db(map, layer)

    maptable = f['table']
    database = f['database']
    driver = f['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(_("There is no table connected to this map. Unable to join any column."))

    # fixed: dict.has_key() was removed in Python 3 -- use `in` instead
    if column not in grass.vector_columns(map, layer):
        grass.fatal(_("Column <%s> not found in table <%s> at layer <%s>") % (column, map, layer))

    all_cols_ot = grass.db_describe(otable, driver=driver, database=database)['cols']
    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    # correlated subquery: pick the matching row of the other table
    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in all_cols_ot:
        # Skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue
        # Sqlite 3 does not support the precision number any more
        if len(col) > 2 and driver != "sqlite":
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]
        colspec = "%s %s" % (colname, coltype)

        # Add only the new column to the table
        if colname not in all_cols_tt:
            if grass.run_command('v.db.addcolumn', map=map, columns=colspec, layer=layer) != 0:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table=maptable, column=column, otable=otable,
                                   ocolumn=ocolumn, colname=colname)

        grass.verbose(_("Updating column <%s> of vector map <%s>...") % (colname, map))
        if grass.write_command('db.execute', stdin=stmt, input='-', database=database, driver=driver) != 0:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history:
    grass.vector_history(map)
def updateColumn(mapName, column, cats, values=None):
    """!Updates column values for rows with a given categories.

    \param cats categories to be updated or a list of tuples (cat, value) if
    \p values is None
    \param values to be set for column (same length as cats) or \c None
    """
    sql_lines = []
    for i in range(len(cats)):
        if values is None:
            cat = str(cats[i][0])
            val = str(cats[i][1])
        else:
            cat = str(cats[i])
            val = str(values[i])
        sql_lines.append(
            "UPDATE " + mapName + " SET " + column + " = " + val
            + " WHERE cat = " + cat + ";\n"
        )
    grass.write_command("db.execute", input="-", stdin="".join(sql_lines))
def updateColumn(mapName, column, cats, values=None):
    """!Updates column values for rows with a given categories.

    \param cats categories to be updated or a list of tuples (cat, value) if
    \p values is None
    \param values to be set for column (same length as cats) or \c None
    """
    statements = ''
    for index, entry in enumerate(cats):
        if values is None:
            cat_id, new_val = str(entry[0]), str(entry[1])
        else:
            cat_id, new_val = str(entry), str(values[index])
        statements += 'UPDATE {0} SET {1} = {2} WHERE cat = {3};\n'.format(
            mapName, column, new_val, cat_id)
    grass.write_command('db.execute', input='-', stdin=statements)
def _getSoillossbare(self,lsfactor,kfactor,rfactor,soillossbare):
    """!Calculate soilloss on bare soil
    A = R * K * LS

    A potential soil loss t/(ha*a) for bare soil
    LS LS-factor
    R rain erosivity factor
    K soil erodibility factor

    @return name of the resulting soil-loss raster (same as *soillossbare*)
    """
    # USLE restricted to bare soil: cover (C) and practice (P) factors omitted
    formula_soillossbare = "$soillossbare = $lsfactor * $kfactor * $rfactor"
    # NOTE(review): `quiet` is a free variable here (not a parameter and not
    # read from self) -- presumably a module-level flag; verify it exists.
    gscript.mapcalc(formula_soillossbare,
                    soillossbare = soillossbare,
                    lsfactor = lsfactor,
                    kfactor = kfactor,
                    rfactor = rfactor,
                    quiet=quiet)

    # green-to-red ramp for increasing soil loss values
    colrules = '\n '.join([
        "0 0:102:0",
        "20 51:153:0",
        "30 204:255:0",
        "40 255:255:0",
        "55 255:102:0",
        "100 255:0:0",
        "150 204:0:0",
        "250 153:0:0",
        "500 102:0:0",
        "5000 102:0:0"
        ])
    gscript.write_command("r.colors", map = soillossbare, rules = '-',
                          stdin = colrules, quiet = quiet)
    return soillossbare
def main(image, gisenv, gisrc):
    """Rectify an oblique photo *image* into the target GRASS location.

    Reads the image group's POINTS file (ground control points), estimates
    a projective transform with scikit-image, warps the JPEG, writes the
    result as an RGB composite in the target mapset, then re-projects the
    associated vector points and 'indicatrix' lines using the inverse
    homography.

    NOTE(review): written for Python 2 (`print` statement below).
    """
    os.environ['GISRC'] = gisrc
    path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'], gisenv['MAPSET'], 'group', image)
    path_to_points = os.path.join(path, 'POINTS')
    # setup target environment and switch to it
    path_to_TARGET = os.path.join(path, 'TARGET')
    with open(path_to_TARGET, 'r') as f:
        target = f.readlines()
    target_location = target[0]
    target_mapset = target[1]
    target_gisrc, tenv = getEnvironment(gisenv['GISDBASE'], target_location, target_mapset)
    im = io.imread('{}.jpg'.format(image))
    # collect source/destination control-point pairs flagged usable (ok=1)
    dst = []
    src = []
    with open(path_to_points) as f:
        for line in f.readlines():
            if line.startswith('#'):
                continue
            dstx, dsty, srcx, srcy, ok = line.split()
            if int(ok):
                dst.append((float(dstx), float(dsty)))
                src.append((float(srcx), float(srcy)))
    dst = np.array(dst)
    src = np.array(src)
    # src2 = copy.copy(src)
    # image row axis grows downward -> flip to cartesian y
    dst[:, 1] = im.shape[0] - dst[:, 1]
    centerx, centery = np.min(src[:, 0]), np.min(src[:, 1])
    src[:, 0] -= centerx
    src[:, 1] -= centery
    revers = 400
    src[:, 1] = revers - src[:, 1]
    tform3 = tf.ProjectiveTransform()
    tform3.estimate(src, dst)
    warped = tf.warp(im, tform3)
    # switch to the target location/mapset and write the warped image there
    os.environ['GISRC'] = target_gisrc
    gscript.run_command('g.region', w=centerx, e=centerx + im.shape[1],
                        n=centery + revers, s=centery - im.shape[0] + revers, res=1)
    name = 'rectified_{}'.format(image)
    # one grey raster per RGB channel; warp output is scaled back to 0-255
    for num, color in zip([0, 1, 2], 'rgb'):
        rectified = garray.array()
        for y in range(rectified.shape[0]):
            for x in range(rectified.shape[1]):
                rectified[y, x] = round(255 * warped[y, x, num])
        rectified.write(mapname=name + '_' + color, overwrite=True)
    gscript.run_command('r.colors', map=[name + '_r', name + '_g', name + '_b'], color='grey')
    gscript.run_command('r.composite', red=name + '_r', green=name + '_g', blue=name + '_b',
                        output=name, overwrite=True)
    gscript.run_command('g.remove', type='raster', pattern=name + "_*", flags='f')
    os.environ['GISRC'] = gisrc
    # indicatrix
    print indicatrix(raster=image, size=5)
    # rectify points: H maps image pixel coordinates back to ground coordinates
    H = inv(tform3.params)
    # NOTE(review): str.strip('camera_') strips any of those characters from
    # both ends, not the literal prefix -- presumably intended as a prefix
    # removal; verify against the naming convention of the input images.
    name = image.strip('camera_')
    os.environ['GISRC'] = gisrc
    vectors = gscript.read_command('g.list', type='vector', pattern="*{}*".format(name),
                                   exclude='*indicatrix').strip().splitlines()
    for vector in vectors:
        os.environ['GISRC'] = gisrc
        points = gscript.read_command('v.out.ascii', input=vector, columns='*').strip()
        new = []
        for record in points.splitlines():
            point = record.split('|')
            xx, yy = float(point[0]), float(point[1])
            yy = im.shape[0] - yy
            # apply the homography in homogeneous coordinates
            Z = xx * H[2, 0] + yy * H[2, 1] + H[2, 2]
            X = (xx * H[0, 0] + yy * H[0, 1] + H[0, 2]) / Z
            Y = (xx * H[1, 0] + yy * H[1, 1] + H[1, 2]) / Z
            # undo the centering/flip applied before the transform fit
            X += centerx
            Y = revers - Y + centery
            # Y = -Y + centery
            new.append([point[3], X, Y, point[2]])
            new[-1].extend(point[4:])
            new[-1] = '|'.join([str(each) for each in new[-1]])
        os.environ['GISRC'] = target_gisrc
        gscript.write_command('v.in.ascii', input='-', flags='z',
                              output='points_{}'.format(vector), overwrite=True,
                              stdin='\n'.join(new),
                              columns="cat integer,x double precision,y double precision,height double precision,"
                                      "date varchar(50),time varchar(50),hour integer,"
                                      "minutes integer,url varchar(500),url2 varchar(500)",
                              x=2, y=3, z=4, cat=1)
    os.environ['GISRC'] = gisrc
    vectors = gscript.read_command('g.list', type='vector',
                                   pattern="*{}*indicatrix".format(name)).strip().splitlines()
    for vector in vectors:
        os.environ['GISRC'] = gisrc
        lines = gscript.read_command('v.out.ascii', input=vector, format='standard').strip()
        new = []
        for record in lines.splitlines():
            first = record.strip().split()[0].strip()
            # header/metadata lines of the standard format are kept verbatim
            try:
                float(first)
            except ValueError:
                new.append(record)
                continue
            if first == '1':
                new.append(record)
                continue
            # coordinate line: re-project it with the same homography
            xx, yy = record.strip().split()
            xx, yy = float(xx), float(yy)
            yy = im.shape[0] - yy
            Z = xx * H[2, 0] + yy * H[2, 1] + H[2, 2]
            X = (xx * H[0, 0] + yy * H[0, 1] + H[0, 2]) / Z
            Y = (xx * H[1, 0] + yy * H[1, 1] + H[1, 2]) / Z
            X += centerx
            Y = revers - Y + centery
            # Y = -Y + centery
            new.append('{} {}'.format(X, Y))
        os.environ['GISRC'] = target_gisrc
        gscript.write_command('v.in.ascii', input='-', output=vector,
                              format='standard', overwrite=True, stdin='\n'.join(new))
        # smooth the re-projected line and replace the original
        gscript.run_command('v.generalize', overwrite=True, input=vector, type='line',
                            output=vector + 'tmp', method='snakes', threshold=10)
        gscript.run_command('g.rename', vector=[vector + 'tmp', vector], overwrite=True)
    gscript.try_remove(target_gisrc)
    return
def main():
    """Assign a new value to a column in a vector map's attribute table.

    Builds a single SQL UPDATE statement -- either with a literal <value>
    or copying from <query_column> -- optionally restricted by <where>,
    and runs it through db.execute. With the SQLite backend an extension
    library (<sqliteextra>) can be preloaded for extra SQL functions.
    """
    vector = options["map"]
    layer = options["layer"]
    column = options["column"]
    value = options["value"]
    qcolumn = options["query_column"]
    where = options["where"]
    sqlitefile = options["sqliteextra"]

    mapset = grass.gisenv()["MAPSET"]

    # does map exist in CURRENT mapset?
    if not grass.find_file(vector, element="vector", mapset=mapset)["file"]:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(_("There is no table connected to this map. Run v.db.connect or v.db.addtable first."))

    table = f["table"]
    database = f["database"]
    driver = f["driver"]

    # check for SQLite backend for extra functions
    if sqlitefile and driver != "sqlite":
        grass.fatal(_("Use of libsqlitefunctions only with SQLite backend"))
    if driver == "sqlite" and sqlitefile:
        if not os.access(sqlitefile, os.R_OK):
            grass.fatal(_("File <%s> not found") % sqlitefile)

    # checking column types
    try:
        coltype = grass.vector_columns(vector, layer)[column]["type"]
    except KeyError:
        grass.fatal(_("Column <%s> not found") % column)

    if qcolumn:
        if value:
            grass.fatal(_("<value> and <qcolumn> are mutually exclusive"))
        # special case: we copy from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_("Either <value> or <qcolumn> must be given"))
        # we insert a value; quote it unless the target column is numeric
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    # SQLite: preload extra functions from extension lib if provided by user
    if sqlitefile:
        sqliteload = "SELECT load_extension('%s');\n" % sqlitefile
        cmd = sqliteload + cmd

    grass.verbose('SQL: "%s"' % cmd)
    grass.write_command("db.execute", input="-", database=database, driver=driver, stdin=cmd)

    # write cmd history:
    grass.vector_history(vector)

    return 0
mean_dem_difference = dem.replace("dem", "mean_dem_difference") mean_dem_regression = dem.replace("dem", "mean_dem_regression") mean_dem_regression_difference = dem.replace("dem", "mean_dem_regression_difference") mean_forms_difference = dem.replace("dem", "mean_forms_difference") mean_depth_difference = dem.replace("dem", "mean_depth_difference") # stdev variables stdev_dem = dem.replace("dem", "stdev_dem") stdev_difference_series = dem.replace("dem", "stdev_difference_series") stdev_regression_difference_series = dem.replace("dem", "stdev_regression_difference_series") # set region gscript.run_command("g.region", rast=region, res=res) # 3D render elevation gscript.write_command("r.colors", map=dem, rules="-", stdin=dem_colors_3d) gscript.run_command( "m.nviz.image", elevation_map=dem, color_map=dem, resolution_fine=res_3d, height=height_3d, perspective=perspective, light_position=light_position, fringe=fringe, fringe_color=color_3d, fringe_elevation=fringe_elevation, output=os.path.join(render_3d, dem), format=format_3d, size=size_3d, errors="ignore",
def main():
    """Import a SPOT Vegetation NDVI HDF file (and optionally its SM quality map).

    Creates a VRT header next to the HDF, imports it via r.in.gdal, rescales
    digital numbers to real NDVI (NDVI = 0.004 * DN - 0.1), and -- with the
    -a flag -- also imports the SM status map and filters the NDVI by it.
    """
    global vrtfile, tmpfile

    infile = options['input']
    rast = options['output']
    also = flags['a']

    #### check for gdalinfo (just to check if installation is complete)
    if not grass.find_program('gdalinfo', '--help'):
        grass.fatal(_("'gdalinfo' not found, install GDAL tools first (http://www.gdal.org)"))

    pid = str(os.getpid())
    tmpfile = grass.tempfile()

    ################### let's go

    spotdir = os.path.dirname(infile)
    spotname = grass.basename(infile, 'hdf')

    if rast:
        name = rast
    else:
        name = spotname

    if not grass.overwrite() and grass.find_file(name)['file']:
        grass.fatal(_("<%s> already exists. Aborting.") % name)

    # still a ZIP file? (is this portable?? see the r.in.srtm script for ideas)
    if infile.lower().endswith('.zip'):
        grass.fatal(_("Please extract %s before import.") % infile)

    # NOTE(review): best-effort MIME sniff via file(1); the bare except is
    # deliberate best-effort (file(1) may not exist). On Python 3
    # communicate() returns bytes, so the str comparison below would never
    # match -- verify which interpreter this script targets.
    try:
        p = grass.Popen(['file', '-ib', infile], stdout = grass.PIPE)
        s = p.communicate()[0]
        if s == "application/x-zip":
            grass.fatal(_("Please extract %s before import.") % infile)
    except:
        pass

    ### create VRT header for NDVI
    projfile = os.path.join(spotdir, "0001_LOG.TXT")
    vrtfile = tmpfile + '.vrt'

    # first process the NDVI:
    grass.try_remove(vrtfile)
    create_VRT_file(projfile, vrtfile, infile)

    ## let's import the NDVI map...
    grass.message(_("Importing SPOT VGT NDVI map..."))
    try:
        grass.run_command('r.in.gdal', input=vrtfile, output=name)
    except CalledModuleError:
        grass.fatal(_("An error occurred. Stop."))

    grass.message(_("Imported SPOT VEGETATION NDVI map <%s>.") % name)

    ## http://www.vgt.vito.be/faq/FAQS/faq19.html
    # Relation between the digital number and the real NDVI:
    #   Real NDVI = a * DN + b   with a = 0.004 and b = -0.1

    # switch to a temporary region matching the imported raster
    grass.use_temp_region()
    grass.run_command('g.region', raster = name, quiet = True)

    grass.message(_("Remapping digital numbers to NDVI..."))
    tmpname = "%s_%s" % (name, pid)
    grass.mapcalc("$tmpname = 0.004 * $name - 0.1", tmpname = tmpname, name = name)
    grass.run_command('g.remove', type = 'raster', name = name, quiet = True, flags = 'f')
    grass.run_command('g.rename', raster = (tmpname, name), quiet = True)

    # write cmd history:
    grass.raster_history(name)

    # apply color table:
    grass.run_command('r.colors', map = name, color = 'ndvi', quiet = True)

    ##########################
    # second, optionally process the SM quality map:
    #
    # SM Status Map (http://nieuw.vgt.vito.be/faq/FAQS/faq22.html)
    #  Bit 7: radiometric quality for B0  (0 bad / 1 good)
    #  Bit 6: radiometric quality for B2  (0 bad / 1 good)
    #  Bit 5: radiometric quality for B3  (0 bad / 1 good)
    #  Bit 4: radiometric quality for MIR (0 bad / 1 good)
    #  Bit 3: land (1) / water (0)
    #  Bit 2: ice/snow (1) / none (0)
    #  Bits 1-0: 00 clear, 01 shadow, 10 uncertain, 11 cloud
    #
    #  Bits 4-7 should all be 1 (their sum is 240); bit 3 (land) raises the
    #  sum to 248; bits 0-1 should be 0 -> a good pixel has value >= 248.
    if also:
        grass.message(_("Importing SPOT VGT NDVI quality map..."))
        grass.try_remove(vrtfile)
        qname = spotname.replace('NDV','SM')
        qfile = os.path.join(spotdir, qname)
        create_VRT_file(projfile, vrtfile, qfile)

        ## let's import the SM quality map...
        smfile = name + '.sm'
        try:
            grass.run_command('r.in.gdal', input=vrtfile, output=smfile)
        except CalledModuleError:
            grass.fatal(_("An error occurred. Stop."))

        # color table covering some of the possible status values:
        rules = [r + '\n' for r in [
            '8 50 50 50',
            '11 70 70 70',
            '12 90 90 90',
            '60 grey',
            '155 blue',
            '232 violet',
            '235 red',
            '236 brown',
            '248 orange',
            '251 yellow',
            '252 green'
            ]]
        grass.write_command('r.colors', map = smfile, rules = '-', stdin = rules)

        grass.message(_("Imported SPOT VEGETATION SM quality map <%s>.") % smfile)
        grass.message(_("Note: A snow map can be extracted by category 252 (d.rast %s cat=252)") % smfile)
        grass.message("")
        grass.message(_("Filtering NDVI map by Status Map quality layer..."))

        # null out pixels flagged as cloud (bits 0-1 == 3) or whose
        # radiometric-quality nibble (bits 4-7) is entirely zero
        filtfile = "%s_filt" % name
        grass.mapcalc("$filtfile = if($smfile % 4 == 3 || ($smfile / 16) % 16 == 0, null(), $name)",
                      filtfile = filtfile, smfile = smfile, name = name)
        grass.run_command('r.colors', map = filtfile, color = 'ndvi', quiet = True)
        grass.message(_("Filtered SPOT VEGETATION NDVI map <%s>.") % filtfile)

        # write cmd history:
        grass.raster_history(smfile)
        grass.raster_history(filtfile)

    grass.message(_("Done."))
def main(): global tmp_graph, tmp_group, tmp_psmap, tmp_psleg, tmp_gisleg breakpoints = options['breakpoints'] colorscheme = options['colorscheme'] column = options['column'] endcolor = options['endcolor'] group = options['group'] layer = options['layer'] linecolor = options['linecolor'] map = options['map'] maxsize = options['maxsize'] monitor = options['monitor'] nint = options['nint'] pointcolor = options['pointcolor'] psmap = options['psmap'] size = options['size'] startcolor = options['startcolor'] themecalc = options['themecalc'] themetype = options['themetype'] type = options['type'] where = options['where'] icon = options['icon'] flag_f = flags['f'] flag_g = flags['g'] flag_l = flags['l'] flag_m = flags['m'] flag_s = flags['s'] flag_u = flags['u'] layer = int(layer) nint = int(nint) size = float(size) maxsize = float(maxsize) # check column type inf = grass.vector_columns(map, layer) if column not in inf: grass.fatal(_("No such column <%s>") % column) coltype = inf[column]['type'].lower() if coltype not in ["integer", "double precision"]: grass.fatal(_("Column <%s> is of type <%s> which is not numeric.") % (column, coltype)) # create temporary file to hold d.graph commands for legend tmp_graph = grass.tempfile() # Create temporary file to commands for GIS Manager group tmp_group = grass.tempfile() # Create temporary file for commands for ps.map map file tmp_psmap = grass.tempfile() # Create temporary file for commands for ps.map legend file tmp_psleg = grass.tempfile() # create file to hold elements for GIS Manager legend tmp_gisleg = grass.tempfile() # Set display variables for group atype = int(type == "area") ptype = int(type == "point") ctype = int(type == "centroid") ltype = int(type == "line") btype = int(type == "boundary") # if running in the GUI, do not create a graphic legend in an xmon if flag_s: flag_l = False # if running in GUI, turn off immediate mode rendering so that the # iterated d.vect commands will composite using the display driver 
os.environ['GRASS_PNG_READ'] = 'TRUE' os.environ['GRASS_PNG_AUTO_WRITE'] = 'FALSE' db = grass.vector_db(map)[1] if not db or not db['table']: grass.fatal(_("No table connected or layer <%s> does not exist.") % layer) table = db['table'] database = db['database'] driver = db['driver'] # update color values to the table? if flag_u: # test, if the column GRASSRGB is in the table s = grass.read_command('db.columns', table = table, database = database, driver = driver) if 'grassrgb' not in s.splitlines(): msg(locals(), _("Creating column 'grassrgb' in table <$table>")) sql = "ALTER TABLE %s ADD COLUMN grassrgb varchar(11)" % table grass.write_command('db.execute', database = database, driver = driver, stdin = sql) # Group name if not group: group = "themes" f_group = file(tmp_group, 'w') f_group.write("Group %s\n" % group) # Calculate statistics for thematic intervals if type == "line": stype = "line" else: stype = ["point", "centroid"] if not where: where = None stats = grass.read_command('v.univar', flags = 'eg', map = map, type = stype, column = column, where = where, layer = layer) stats = grass.parse_key_val(stats) min = float(stats['min']) max = float(stats['max']) mean = float(stats['mean']) sd = float(stats['population_stddev']) q1 = float(stats['first_quartile']) q2 = float(stats['median']) q3 = float(stats['third_quartile']) q4 = max ptsize = size if breakpoints and themecalc != "custom_breaks": grass.warning(_("Custom breakpoints ignored due to themecalc setting.")) # set interval for each thematic map calculation type if themecalc == "interval": numint = nint step = float(max - min) / numint breakpoints = [min + i * step for i in xrange(numint + 1)] annotations = "" elif themecalc == "std_deviation": # 2 standard deviation units on either side of mean, # plus min to -2 sd units and +2 sd units to max, if applicable breakpoints = [min] + [i for i in [(mean + i * sd) for i in [-2,-1,0,1,2]] if min < i < max] + [max] annotations = [""] + [("%dsd" % i) for (i, 
j) in [(i, mean + i * sd) for i in [-2,-1,0,1,2]] if (min < j < max)] + [""] annotations = ";".join(annotations) numint = len(breakpoints) - 1 elif themecalc == "quartiles": numint=4 # one for each quartile breakpoints = [min, q1, q2, q3, max] annotations = " %f; %f; %f; %f" % (q1, q2, q3, q4) elif themecalc == "custom_breaks": if not breakpoints: breakpoints = sys.stdin.read() breakpoints = [int(x) for x in breakpoints.split()] numint = len(breakpoints) - 1 annotations = "" else: grass.fatal(_("Unknown themecalc type <%s>") % themecalc) pointstep = (maxsize - ptsize) / (numint - 1) # Prepare legend cuts for too large numint if numint > max_leg_items: xupper = int(numint - max_leg_items / 2) + 1 xlower = int(max_leg_items / 2) + 1 else: xupper = 0 xlower = 0 # legend title f_graph = file(tmp_graph, 'w') out(f_graph, locals(), """\ color 0:0:0 size 2 2 move 1 95 text Thematic map legend for column $column of map $map size 1.5 1.8 move 4 90 text Value range: $min - $max """) f_gisleg = file(tmp_gisleg, 'w') out(f_gisleg, locals(), """\ title - - - {Thematic map legend for column $column of map $map} """) f_psleg = file(tmp_psleg, 'w') out(f_psleg, locals(), """\ text 1% 95% Thematic map legend for column $column of map $map ref bottom left end text 4% 90% Value range: $min - $max ref bottom left end """) msg(locals(), _("Thematic map legend for column $column of map $map")) msg(locals(), _("Value range: $min - $max")) colorschemes = { "blue-red": ("0:0:255", "255:0:0"), "red-blue": ("255:0:0", "0:0:255"), "green-red": ("0:255:0", "255:0:0"), "red-green": ("255:0:0", "0:255:0"), "blue-green": ("0:0:255", "0:255:0"), "green-blue": ("0:255:0", "0:0:255"), "cyan-yellow": ("0:255:255", "255:255:0"), "yellow-cyan": ("255:255:0", "0:255:255"), "custom_gradient": (startcolor, endcolor) } # open file for psmap instructions f_psmap = file(tmp_psmap, 'w') # graduated color thematic mapping if themetype == "graduated_colors": if colorscheme in colorschemes: startc, endc = 
colorschemes[colorscheme] # set color schemes for graduated color maps elif colorscheme == "single_color": if themetype == "graduated_points": startc = endc = linecolor else: startc = endc = pointcolor else: grass.fatal(_("This should not happen: parser error. Unknown color scheme %s") % colorscheme) color = __builtins__.map(int, startc.split(":")) endcolor = __builtins__.map(int, endc.split(":")) #The number of color steps is one less then the number of classes nclrstep = numint - 1 clrstep = [(a - b) / nclrstep for a, b in zip(color, endcolor)] themecolor = startc # display graduated color themes if themecalc == "interval": out(f_graph, locals(), """\ move 4 87 text Mapped by $numint intervals of $step """) out(f_gisleg, locals(), """\ subtitle - - - {Mapped by $numint intervals of $step} """) out(f_psleg, locals(), """\ text 4% 87% Mapped by $numint intervals of $step ref bottom left end """) msg(locals(), _("Mapped by $numint intervals of $step")) # display graduated color themes for standard deviation units if themecalc == "std_deviation": out(f_graph, locals(), """\ move 4 87 text Mapped by standard deviation units of $sd (mean = $mean) """) out(f_gisleg, locals(), """\ subtitle - - - {Mapped by standard deviation units of $sd (mean = $mean)} """) out(f_psleg, locals(), """\ text 4% 87% Mapped by standard deviation units of $sd (mean = $mean) ref bottom left end """) msg(locals(), _("Mapped by standard deviation units of $sd (mean = $mean)")) # display graduated color themes for quartiles if themecalc == "quartiles": out(f_graph, locals(), """\ move 4 87 text Mapped by quartiles (median = $q2) """) out(f_gisleg, locals(), """\ subtitle - - - {Mapped by quartiles (median = $q2)} """) out(f_psleg, locals(), """\ text 4% 87% Mapped by quartiles (median = $q2) ref bottom left end """) msg(locals(), _("Mapped by quartiles (median = $q2)")) f_graph.write("""\ move 4 83 text Color move 14 83 text Value move 4 80 text ===== move 14 80 text ============ """) 
f_psleg.write("""\ text 4% 83% Color ref bottom left end text 14% 83% Value ref bottom left end text 4% 80% ===== ref bottom left end text 14% 80% ============ ref bottom left end """) sys.stdout.write("Color(R:G:B)\tValue\n") sys.stdout.write("============\t==========\n") line1 = 78 line2 = 76 line3 = 75 i = 1 first = True while i < numint: if flag_m: # math notation if first: closebracket = "]" openbracket = "[" mincomparison = ">=" first = False else: closebracket = "]" openbracket = "]" mincomparison = ">" else: closebracket = "" openbracket = "" if first: mincomparison = ">=" first = False else: mincomparison = ">" themecolor = ":".join(__builtins__.map(str,color)) if flag_f: linecolor = "none" else: if type in ["line", "boundary"]: linecolor = themecolor else: linecolor = linecolor rangemin = __builtins__.min(breakpoints) rangemax = __builtins__.max(breakpoints) if not annotations: extranote = "" else: extranote = annotations[i] if i < xlower or i >= xupper: xline1 = line2 + 2 xline3 = line2 - 1 out(f_graph, locals(), """\ color $themecolor polygon 5 $xline1 8 $xline1 8 $xline3 5 $xline3 color $linecolor move 5 $xline1 draw 8 $xline1 draw 8 $xline3 draw 5 $xline3 draw 5 $xline1 move 14 $line2 color 0:0:0 text $openbracket$rangemin - $rangemax$closebracket $extranote """) else: if i == xlower: out(f_graph, locals(), """\ color 0:0:0 move 10 $line2 text ... 
""") else: #undo next increment line2 += 4 if i < xlower or i >= xupper: out(f_gisleg, locals(), """\ area $themecolor $linecolor - {$openbracket$rangemin - $rangemax$closebracket $extranote} """) if type in ["line", "boundary"]: out(f_psleg, locals(), """\ line 5% $xline1% 8% $xline1% color $linecolor end text 14% $xline1% $openbracket$rangemin - $rangemax$closebracket $extranote ref center left end """) elif type in ["point", "centroid"]: out(f_psleg, locals(), """\ point 8% $xline1% color $linecolor fcolor $themecolor size $size symbol $icon end text 14% $xline1% $openbracket$rangemin - $rangemax$closebracket $extranote ref center left end """) else: out(f_psleg, locals(), """\ rectangle 5% $xline1% 8% $xline3% color 0:0:0 fcolor $themecolor end text 14% $xline3% $openbracket$rangemin - $rangemax$closebracket DCADCA $extranote ref bottom left end """) else: if i == xlower: out(f_psleg, locals(), """\ color 0:0:0 text 14% $xline3% ... ref bottom left end """) f_gisleg.write("text - - - {...}\n") sys.stdout.write(subs(locals(), "$themecolor\t\t$openbracket$rangemin - $rangemax$closebracket $extranote\n")) if not where: sqlwhere = subs(locals(), "$column $mincomparison $rangemin AND $column <= $rangemax") else: sqlwhere = subs(locals(), "$column $mincomparison $rangemin AND $column <= $rangemax AND $where") # update color to database? 
if flag_u: sql = subs(locals(), "UPDATE $table SET GRASSRGB = '$themecolor' WHERE $sqlwhere") grass.write_command('db.execute', database = database, driver = driver, stdin = sql) # Create group for GIS Manager if flag_g: # change rgb colors to hex xthemecolor = "#%02X%02X%02X" % tuple(__builtins__.map(int, themecolor.split(":"))) #xlinecolor=`echo $linecolor | awk -F: '{printf("#%02X%02X%02X\n",$1,$2,$3)}'` if "$linecolor" == "black": xlinecolor = "#000000" else: xlinecolor = xthemecolor # create group entry out(f_group, locals(), """\ _check 1 Vector $column = $rangemin - $rangemax _check 1 map $map display_shape 1 display_cat 0 display_topo 0 display_dir 0 display_attr 0 type_point $ptype type_line $ltype type_boundary $btype type_centroid $ctype type_area $atype type_face 0 color $xlinecolor fcolor $xthemecolor width $ptsize _use_fcolor 1 lcolor #000000 sqlcolor 0 icon $icon size $ptsize field $layer lfield $layer attribute xref left yref center lsize 8 cat where $sqlwhere _query_text 0 _query_edit 1 _use_where 1 minreg maxreg _width 0.1 End """) # display theme vector map grass.run_command('d.vect', map = map, type = type, layer = layer, where = sqlwhere, color = linecolor, fcolor = themecolor, icon = icon, size = ptsize) if type in ["line", "boundary"]: out(f_psmap, locals(), """\ vlines $map type $type layer $layer where $sqlwhere color $linecolor label $rangemin - $rangemax end """) elif type in ["point", "centroid"]: out(f_psmap, locals(), """\ vpoints $map type $type layer $layer where $sqlwhere color $linecolor fcolor $themecolor symbol $icon label $rangemin - $rangemax end """) else: out(f_psmap, locals(), """\ vareas $map layer $layer where $sqlwhere color $linecolor fcolor $themecolor label $rangemin - $rangemax end """) # increment for next theme i += 1 if i == numint: color = endcolor else: color = [a - b for a, b in zip(color, clrstep)] line1 -= 4 line2 -= 4 line3 -= 4 #graduated points and line widths thematic mapping if themetype in 
["graduated_points", "graduated_lines"]: #display graduated points/lines by intervals if themecalc == "interval": out(f_graph, locals(), """\ move 4 87 text Mapped by $numint intervals of $step """) out(f_gisleg, locals(), """\ subtitle - - - {Mapped by $numint intervals of $step} """) out(f_psleg, locals(), """\ text 4% 87% Mapped by $numint intervals of $step ref bottom left end """) msg(locals(), _("Mapped by $numint intervals of $step")) # display graduated points/lines for standard deviation units if themecalc == "std_deviation": out(f_graph, locals(), """\ move 4 87 text Mapped by standard deviation units of $sd (mean = $mean) """) out(f_gisleg, locals(), """\ subtitle - - - {Mapped by standard deviation units of $sd (mean = $mean)} """) out(f_psleg, locals(), """\ text 4% 87% Mapped by standard deviation units of $sd (mean = $mean) ref bottom left end """) msg(locals(), _("Mapped by standard deviation units of $sd (mean = $mean)")) # display graduated points/lines for quartiles if themecalc == "quartiles": out(f_graph, locals(), """\ move 4 87 text Mapped by quartiles (median = $q2) """) out(f_gisleg, locals(), """\ subtitle - - - {Mapped by quartiles (median = $q2)} """) out(f_psleg, locals(), """\ text 4% 87% Mapped by quartiles (median = $q2) ref bottom left end """) msg(locals(), _("Mapped by quartiles (median = $q2)")) line1 = 76 line2 = 75 out(f_graph, locals(), """\ move 4 83 text Size/width move 25 83 text Value move 4 80 text ============== move 25 80 text ============== """) out(f_psleg, locals(), """\ text 4% 83% Icon size ref bottom left end text 25% 83% Value ref bottom left end text 4% 80% ============ ref bottom left end text 25% 80% ============ ref bottom left end """) sys.stdout.write("Size/width\tValue\n") sys.stdout.write("==========\t=====\n") themecolor = pointcolor if flag_f: linecolor = "none" i = numint ptsize = maxsize while i >= 1: if flag_m: # math notation if i == 1: closebracket = "]" openbracket = "[" mincomparison = ">=" else: 
closebracket = "]" openbracket = "]" mincomparison = ">" else: closebracket = "" openbracket = "" if i == 1: mincomparison = ">=" else: mincomparison = ">" themecolor = pointcolor if flag_f: linecolor = "none" rangemin = __builtins__.min(breakpoints) rangemax = __builtins__.max(breakpoints) if not annotations: extranote = "" else: extranote = annotations[i] iconsize = int(ptsize / 2) lineht = int(ptsize / 4) if lineht < 4: lineht = 4 if i < xlower or i >= xupper: if themetype == "graduated_lines": out(f_graph, locals(), """\ color $linecolor """) out(f_gisleg, locals(), """\ line $themecolor $linecolor $ptsize {$openbracket$rangemin - $rangemax$closebracket $extranote} """) else: out(f_graph, locals(), """\ color $themecolor """) out(f_gisleg, locals(), """\ point $themecolor $linecolor $ptsize {$openbracket$rangemin - $rangemax$closebracket $extranote} """) out(f_graph, locals(), """\ icon + $iconsize 5 $line1 color 0:0:0 move 10 $line2 text $ptsize pts move 25 $line2 text $openbracket$rangemin - $rangemax$closebracket $extranote """) else: if i == xlower: out(f_graph, locals(), """\ color 0:0:0 move 10 $line2 text ... """) out(f_gisleg, locals(), """\ text - - - ... """) else: # undo future line increment line2 += lineht if i < xlower or i >= xupper: out(f_psleg, locals(), """\ point 8% $line1% color $linecolor fcolor $themecolor size $iconsize symbol $icon end text 25% $line1% $openbracket$rangemin - $rangemax$closebracket $extranote ref center left end """) else: if i == xlower: out(f_psleg, locals(), """\ text 25% $xline1% ... ref center left end """) sys.stdout.write(subs(locals(), "$ptsize\t\t$openbracket$rangemin - $rangemax$closebracket $extranote\n")) if not where: sqlwhere = subs(locals(), "$column $mincomparison $rangemin AND $column <= $rangemax") else: sqlwhere = subs(locals(), "$column $mincomparison $rangemin AND $column <= $rangemax AND $where") # update color to database? 
if flag_u: sql = subs(locals(), "UPDATE $table SET grassrgb = '$themecolor' WHERE $sqlwhere") grass.write_command('db.execute', database = database, driver = driver, stdin = sql) # Create group for GIS Manager if flag_g: # change rgb colors to hex xthemecolor = "#%02X%02X%02X" % tuple(__builtins__.map(int,themecolor.split(":"))) xlinecolor = "#000000" # create group entry out(f_group, locals(), """\ _check 1 Vector $column = $rangemin - $rangemax _check 1 map $map display_shape 1 display_cat 0 display_topo 0 display_dir 0 display_attr 0 type_point $ptype type_line $ltype type_boundary $btype type_centroid $ctype type_area $atype type_face 0 color $xlinecolor width $ptsize fcolor $xthemecolor _use_fcolor 1 lcolor #000000 sqlcolor 0 icon $icon size $ptsize field $layer lfield $layer attribute xref left yref center lsize 8 cat where $sqlwhere _query_text 0 _query_edit 1 _use_where 1 minreg maxreg _width 0.1 End """) #graduates line widths or point sizes if themetype == "graduated_lines": grass.run_command('d.vect', map = map, type = type, layer = layer, where = sqlwhere, color = linecolor, fcolor = themecolor, icon = icon, size = ptsize, width = ptsize) else: grass.run_command('d.vect', map = map, type = type, layer = layer, where = sqlwhere, color = linecolor, fcolor = themecolor, icon = icon, size = ptsize) out(f_psmap, locals(), """\ vpoints $map type $type layer $layer where $sqlwhere color $linecolor fcolor $themecolor symbol $icon size $ptsize label $rangemin - $rangemax end """) ptsize -= pointstep line1 -= lineht line2 -= lineht i -= 1 # Create graphic legend f_graph.close() if flag_l: grass.run_command('d.erase') grass.run_command('d.graph', input = tmp_graph) # Create group file for GIS Manager f_group.write("End\n") f_group.close() if flag_g: shutil.copyfile(tmp_group, "%s.dm" % group) # Create ps.map map file f_psmap.write("end\n") f_psmap.close() if psmap: shutil.copyfile(tmp_psmap, "%s.psmap" % psmap) # Create ps.map legend file f_psleg.write("end\n") 
f_psleg.close() if psmap: shutil.copyfile(tmp_psleg, "%s_legend.psmap" % psmap) # Create text file to use with d.graph in GIS Manager f_gisleg.close() if flag_s: tmpdir = os.path.dirname(tmp_gisleg) tlegfile = os.path.join(tmpdir, "gismlegend.txt") shutil.copyfile(tmp_gisleg, tlegfile)
def main():
    """Drop a single column from a database table (requires the force flag)."""
    table = options['table']
    column = options['column']
    force = flags['f']

    # Make sure DB connection parameters are initialized.
    grass.run_command('db.connect', flags='c')

    conn = grass.db_connection()
    database = conn['database']
    driver = conn['driver']
    # schema needed for PG?

    if force:
        grass.message(_("Forcing ..."))

    if column == "cat":
        grass.warning(_("Deleting <%s> column which may be needed to keep table connected to a vector map") % column)

    # Verify the column actually exists before doing anything destructive.
    existing = [descr[0] for descr in grass.db_describe(table)['cols']]
    if column not in existing:
        grass.fatal(_("Column <%s> not found in table") % column)

    if not force:
        # Dry run: report what would happen and exit cleanly.
        grass.message(_("Column <%s> would be deleted.") % column)
        grass.message("")
        grass.message(_("You must use the force flag to actually remove it. Exiting."))
        sys.exit(0)

    if driver == "sqlite":
        # SQLite has no DROP COLUMN: rebuild the table without the column.
        # http://www.sqlite.org/faq.html#q13
        keep = [descr for descr in grass.db_describe(table)['cols']
                if descr[0] != column]
        colnames = ", ".join(descr[0] for descr in keep)
        coltypes = ", ".join("%s %s" % (descr[0], descr[1]) for descr in keep)
        cmds = [
            "BEGIN TRANSACTION",
            "CREATE TEMPORARY TABLE ${table}_backup(${coldef})",
            "INSERT INTO ${table}_backup SELECT ${colnames} FROM ${table}",
            "DROP TABLE ${table}",
            "CREATE TABLE ${table}(${coldef})",
            "INSERT INTO ${table} SELECT ${colnames} FROM ${table}_backup",
            "DROP TABLE ${table}_backup",
            "COMMIT"
        ]
        sql = string.Template(';\n'.join(cmds)).substitute(
            table=table, coldef=coltypes, colnames=colnames)
    else:
        # Other drivers support DROP COLUMN directly.
        sql = "ALTER TABLE %s DROP COLUMN %s" % (table, column)

    if grass.write_command('db.execute', input='-', database=database,
                           driver=driver, stdin=sql) != 0:
        grass.fatal(_("Cannot continue (problem deleting column)."))
def main():
    """Drop one or more attribute columns from the table of a vector map.

    Options used: map, layer, columns (comma-separated column names).
    The layer's key column can never be dropped; columns missing from the
    table are skipped with a warning.
    """
    map = options['map']
    layer = options['layer']
    columns = options['columns'].split(',')

    mapset = grass.gisenv()['MAPSET']

    # does map exist in CURRENT mapset?
    if not grass.find_file(map, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    # connection info for the requested layer: table, key, database, driver
    f = grass.vector_layer_db(map, layer)

    table = f['table']
    keycol = f['key']
    database = f['database']
    driver = f['driver']

    if not table:
        grass.fatal(_("There is no table connected to the input vector map. "
                      "Unable to delete any column. Exiting."))
    # refuse to drop the key column: it links the table to the map
    if keycol in columns:
        grass.fatal(_("Unable to delete <%s> column as it is needed to keep table <%s> "
                      "connected to the input vector map <%s>") % (keycol, table, map))

    for column in columns:
        if column not in grass.vector_columns(map, layer):
            grass.warning(_("Column <%s> not found in table <%s>. Skipped") % (column, table))
            continue

        if driver == "sqlite":
            # SQLite has no native DROP COLUMN: rebuild the table without
            # the column, see http://www.sqlite.org/faq.html#q11
            colnames = []
            coltypes = []
            # re-describe each iteration so earlier drops are reflected
            for f in grass.db_describe(table, database=database, driver=driver)['cols']:
                if f[0] == column:
                    continue
                colnames.append(f[0])
                coltypes.append("%s %s" % (f[0], f[1]))

            colnames = ", ".join(colnames)
            coltypes = ", ".join(coltypes)

            # rebuild inside one transaction and restore the unique cat index
            cmds = [
                "BEGIN TRANSACTION",
                "CREATE TEMPORARY TABLE ${table}_backup(${coldef})",
                "INSERT INTO ${table}_backup SELECT ${colnames} FROM ${table}",
                "DROP TABLE ${table}",
                "CREATE TABLE ${table}(${coldef})",
                "INSERT INTO ${table} SELECT ${colnames} FROM ${table}_backup",
                "CREATE UNIQUE INDEX ${table}_cat ON ${table} (${keycol} )",
                "DROP TABLE ${table}_backup",
                "COMMIT"
            ]
            tmpl = string.Template(';\n'.join(cmds))
            sql = tmpl.substitute(table=table, coldef=coltypes, colnames=colnames, keycol=keycol)
        else:
            # other drivers support DROP COLUMN directly
            sql = "ALTER TABLE %s DROP COLUMN %s" % (table, column)

        try:
            grass.write_command('db.execute', input='-', database=database,
                                driver=driver, stdin=sql)
        except CalledModuleError:
            grass.fatal(_("Deleting column failed"))

    # write cmd history:
    grass.vector_history(map)
def analyze_reference():
    """compute the relief, contours, slope, water flow, difference,
    depressions, and concentrated flow of the reference landscape

    Renders one PNG per product into <grassdata>/results/reference/.
    NOTE(review): 'driver' is not defined in this function — it is
    presumably a module-level display driver name (e.g. 'cairo'); confirm.
    """

    # temporary region
    gscript.use_temp_region()

    # set grass data directory
    grassdata = os.path.normpath("C:/Users/Brendan/Documents/grassdata/")  # specify the full filepath filename of your grassdata directory

    # set rendering directory
    render_dir = os.path.normpath("results/reference/")
    render = os.path.join(grassdata,render_dir)

    # set color rules
    depressions_colors = '0% aqua\n100% blue'
    depth_colors = '0 255:255:255\n0.001 255:255:0\n0.05 0:255:255\n0.1 0:127:255\n0.5 0:0:255\n100% 0:0:0'
    difference_colors = '-0.5 blue\n0 white\n0.5 red'

    # set paramters
    overwrite = True

    # set DEM
    dem = "dem"

    # variables derived from the DEM name
    region=dem
    relief=dem.replace("dem","relief")
    contour=dem.replace("dem","contour")
    slope=dem.replace("dem","slope")
    depth=dem.replace("dem","depth")
    # NOTE(review): before is the literal string "depth" and after evaluates
    # to the same raster name — the difference below is depth minus itself.
    # Looks like 'before' should name a different (reference) raster; confirm.
    before="depth"
    after=depth
    difference=dem.replace("dem","diff")
    depressions=dem.replace("dem","depressions")
    concentrated_flow='concentrated_flow'
    concentrated_points='concentrated_points'

    # set region
    gscript.run_command('g.region', rast=region, res=3)

    # render DEM
    info = gscript.parse_command('r.info', map=dem, flags='g')
    # NOTE(review): relies on Python 2 integer division for the width padding
    width=int(info.cols)+int(info.cols)/2
    height=int(info.rows)
    gscript.run_command('d.mon', start=driver, width=width, height=height, output=os.path.join(render,dem+".png"), overwrite=overwrite)
    gscript.run_command('r.colors', map=dem, color="elevation")
    gscript.run_command('g.region', rast="dem")
    gscript.run_command('r.relief', input=dem, output=relief, altitude=90, azimuth=45, zscale=1, units="intl", overwrite=overwrite)
    gscript.run_command('g.region', rast=region)
    gscript.run_command('r.contour', input=dem, output=contour, step=5, overwrite=overwrite)
    gscript.run_command('d.shade', shade=relief, color=dem, brighten=75)
    gscript.run_command('d.vect', map=contour, display="shape")
    gscript.run_command('d.legend', raster=dem, fontsize=9, at=(10,70,1,4))
    gscript.run_command('d.mon', stop=driver)

    # compute slope
    gscript.run_command('d.mon', start=driver, width=width, height=height, output=os.path.join(render,slope+".png"), overwrite=overwrite)
    gscript.run_command('r.param.scale', input=dem, output=slope, size=9, method="slope", overwrite=overwrite)
    gscript.run_command('r.colors', map=slope, color="slope")
    gscript.run_command('d.shade', shade=relief, color=slope, brighten=75)
    gscript.run_command('d.vect', map=contour, display='shape')
    gscript.run_command('d.legend', raster=slope, fontsize=9, at=(10,90,1,4))
    gscript.run_command('d.mon', stop=driver)

    # simulate water flow
    gscript.run_command('d.mon', start=driver, width=width, height=height, output=os.path.join(render,depth+".png"), overwrite=overwrite)
    gscript.run_command('r.slope.aspect', elevation=dem, dx='dx', dy='dy', overwrite=overwrite)
    gscript.run_command('r.sim.water', elevation=dem, dx='dx', dy='dy', rain_value=300, depth=depth, nwalkers=5000, niterations=4, overwrite=overwrite)
    # dx/dy are only needed as r.sim.water inputs; drop them immediately
    gscript.run_command('g.remove', flags='f', type='raster', name=['dx', 'dy'])
    gscript.run_command('d.shade', shade=relief, color=depth, brighten=75)
    gscript.run_command('d.vect', map=contour, display='shape')
    gscript.run_command('d.legend', raster=depth, fontsize=9, at=(10,90,1,4))
    gscript.run_command('d.mon', stop=driver)

    # identify depressions: cells raised by r.fill.dir are depressions
    gscript.run_command('r.fill.dir', input=dem, output='depressionless_dem', direction='flow_dir',overwrite=overwrite)
    # depth=0 here is the minimum fill difference for a cell to count
    gscript.run_command('r.mapcalc', expression='{depressions} = if({depressionless_dem} - {dem} > {depth}, {depressionless_dem} - {dem}, null())'.format(depressions=depressions, depressionless_dem='depressionless_dem', dem=dem, depth=0), overwrite=overwrite)
    gscript.write_command('r.colors', map=depressions, rules='-', stdin=depressions_colors)
    gscript.run_command('d.mon', start=driver, width=width, height=height, output=os.path.join(render,depressions+".png"), overwrite=overwrite)
    gscript.run_command('d.shade', shade=relief, color=depressions, brighten=75)
    gscript.run_command('d.vect', map=contour, display='shape')
    gscript.run_command('d.legend', raster=depressions, fontsize=9, at=(10,90,1,4))
    gscript.run_command('d.mon', stop=driver)

    # compute the difference between the modeled and reference flow depth
    gscript.run_command('d.mon', start=driver, width=width, height=height, output=os.path.join(render,difference+".png"), overwrite=overwrite)
    gscript.run_command('r.mapcalc', expression='{difference} = {before} - {after}'.format(before=before,after=after,difference=difference), overwrite=overwrite)
    gscript.write_command('r.colors', map=difference, rules='-', stdin=difference_colors)
    gscript.run_command('d.shade', shade=relief, color=difference, brighten=75)
    gscript.run_command('d.vect', map=contour, display='shape')
    gscript.run_command('d.legend', raster=difference, fontsize=9, at=(10,90,1,4))
    gscript.run_command('d.mon', stop=driver)

    # extract concentrated flow (depth >= 5 cm) and sample it as points
    gscript.run_command('r.mapcalc', expression='{concentrated_flow} = if({depth}>=0.05,{depth},null())'.format(depth=depth,concentrated_flow=concentrated_flow), overwrite=overwrite)
    gscript.write_command('r.colors', map=concentrated_flow, rules='-', stdin=depth_colors)
    gscript.run_command('r.random', input=concentrated_flow, npoints='100%', vector=concentrated_points, overwrite=overwrite)
    gscript.run_command('d.mon', start=driver, width=width, height=height, output=os.path.join(render,concentrated_flow+".png"), overwrite=overwrite)
    gscript.run_command('d.shade', shade=relief, color=concentrated_flow, brighten=75)
    gscript.run_command('d.vect', map=concentrated_points, display='shape')
    gscript.run_command('d.legend', raster=concentrated_flow, fontsize=9, at=(10,90,1,4))
    gscript.run_command('d.mon', stop=driver)

    # compute number of cells with depressions (sum of boolean-like values)
    univar = gscript.parse_command('r.univar', map=depressions, separator='newline', flags='g')
    depression_cells = float(univar['sum'])
    # NOTE(review): Python 2 print statement — this file predates Python 3
    print 'cells with depressions: ' + str(depression_cells)
def set_colors(map, v0, v1):
    """Apply a grey ramp to *map*: black up to v0, white from v1 upward."""
    ramp = "0% black\n" + "%f black\n" % v0 + "%f white\n" % v1 + "100% white\n"
    grass.write_command("r.colors", map=map, rules="-", stdin=ramp)
def main():
    """Join columns from another table into the table of a vector map.

    Options used: map, layer, column (join column in the map table),
    otable, ocolumn (join column in the other table), scolumns (optional
    subset of columns to copy; default is all columns of otable).
    NOTE(review): Python 2 era code — uses dict.has_key() and checks
    run_command()/write_command() return values against 0 (the old
    grass.script exit-code convention); confirm against the target
    GRASS version before porting.
    """
    map = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['otable']
    ocolumn = options['ocolumn']
    if options['scolumns']:
        scolumns = options['scolumns'].split(',')
    else:
        scolumns = None

    f = grass.vector_layer_db(map, layer)

    maptable = f['table']
    database = f['database']
    driver = f['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(_("There is no table connected to this map. Unable to join any column."))

    # check if column is in map table
    if not grass.vector_columns(map, layer).has_key(column):
        grass.fatal(_("Column <%s> not found in table <%s>") % (column, maptable))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver = driver, database = database)['cols']

    # check if ocolumn is on other table
    if ocolumn not in [ocol[0] for ocol in all_cols_ot]:
        grass.fatal(_("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine columns subset from other table
    if not scolumns:
        # select all columns from other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        # check if scolumns exists in the other table
        for scol in scolumns:
            found = False
            for col_ot in all_cols_ot:
                if scol == col_ot[0]:
                    found = True
                    cols_to_add.append(col_ot)
                    break
            if not found:
                grass.warning(_("Column <%s> not found in table <%s>.") % (scol, otable))

    # columns already present in the map table (to avoid re-adding them)
    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    # correlated subquery: pull the matching value row by row
    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in cols_to_add:
        # skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue
        # Sqlite 3 does not support the precision number any more
        if len(col) > 2 and driver != "sqlite":
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table
        if colname not in all_cols_tt:
            if grass.run_command('v.db.addcolumn', map = map, columns = colspec, layer = layer) != 0:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table = maptable, column = column,
                                   otable = otable, ocolumn = ocolumn,
                                   colname = colname)
        grass.debug(stmt, 1)
        grass.verbose(_("Updating column <%s> of vector map <%s>...") % (colname, map))
        if grass.write_command('db.execute', stdin = stmt, input = '-', database = database, driver = driver) != 0:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(map)

    return 0
# Compute difference statistics for every non-TanDEM-X-12m DEM in the area.
# NOTE(review): Python 2 script (print statement below); 'area', 'area_name',
# 'raster_as_1d_array', 'calc_stats' and 'df_stats' (presumably a pandas
# DataFrame) are defined earlier in the script — confirm before reuse.
for dem in area:
    if not dem.startswith('tdx12'):
        diff_map = 'tdx12_diff_' + dem.split('_')[0] + '_wgs84_' + area_name
        diff_map_array = raster_as_1d_array(diff_map)
        df_stats[diff_map] = calc_stats(diff_map_array)

# export stats to latex
print('\n\n')
print df_stats.transpose().to_latex(float_format=lambda x:'%4.2f' % x)

# lists of all diff maps
diff_list = grass.list_grouped('raster', pattern='*diff*')['tandemX_brasil']

# fix colortable (color gradient from blue-white-red between -15/+15, then darker colors to min-max)
rule = '''0% 0:30:110 \n-15 blue \n0 white \n15 red \n100% 125:25:15'''
for diff_map in diff_list:
    grass.write_command('r.colors', map=diff_map, rules='-', stdin=rule)

# export diffs maps as pngs
for diff_map in diff_list:
    area_name = diff_map.split('_')[-1]
    # align region to the diff map at 1 arc-second resolution
    grass.run_command('g.region', raster=diff_map, flags='pa', res='0:0:01')
    # diff map
    grass.run_command('d.mon', start='cairo', output=diff_map+'.png', resolution='3', \
        height=500, width=500, overwrite=True)
    grass.run_command('d.rast', map=diff_map)
    grass.run_command('d.grid', size='0.25', text_color='black', flags='c')
    # legend clamped to -30/+30 so outliers do not stretch the ramp
    grass.run_command('d.legend', raster=diff_map, flags='tsbd', at='4,25,6,8', \
        font='Helvetica', fontsize=12, bgcolor='240:240:240', range=(-30,30))
    grass.run_command('d.mon', stop='cairo')

# export histograms as pngs
# Collect the numeric upper bounds printed by the previous command (one
# "low-high" range per line); vals[-1] ends up being the layer maximum.
# NOTE(review): Python 2 script (print statements); 'output', 'vals',
# 'raster_layer', 'recoded_raster_layer', 'g' and 'r' come from earlier
# in the script — confirm before reuse.
for line in output.stdout:
    vals.append(float(line.rstrip('\r\n').split('-')[1]))
print "Layer maximum value: %s" % vals[-1]

# Recode the raster using these rules
# A better solution would be to read in a rules file, but I'm too lazy.
# If you want to do that, you'll need to specify a file location
# and remove the write_command call.
# We're also going to be really snarky and recode to the maximum int value (255).
#That's mostly because I don't know how to programmatically
#rescale a float to an int
print "Recoding raster layer"
rules = "0.0:" + str(vals[-1]) + ":0:255"
#r.recode(input=raster_layer, rules=rules, output=recoded_raster_layer)
# NOTE(review): r.recode's stdin parameter is normally spelled 'rules',
# not 'rule' — verify this call actually feeds the rules in.
g.write_command('r.recode', input=raster_layer, rule='-', output=recoded_raster_layer, stdin=rules)

# Now, we apply our color table
# Again, we'll pipe this from stdin, but you should probably use a file.
# These rules get way more complicated, since we need multiple lines,
# but I'm still too lazy to write to a file. Also, \n's are fun.
print "Applying new color table"
color_rules = "0% blue\n33.33% green\n66.67% yellow\n100% red"
g.write_command('r.colors', map=recoded_raster_layer, rules='-', stdin=color_rules)

# Set NULL values to remove noise
# I'm doing a fairly aggressive data purge, use responsibly
print "Nullifying nulls"
r.null(map=recoded_raster_layer, setnull="0-20")

# Finally, write it to a GeoTiff
# set categories and colors (reclassified maps) rule = '''0 thru 2 = NULL\n 3 thru 4 = 1 1 x AMP_thresh_1\n 5 thru 6 = 2 2 x AMP_thresh_1\n 7 thru 8 = 3 3 x AMP_thresh_1\n 9 thru 16 = 4 1 x AMP_thresh_2\n 17 thru 25 = 5 2 x AMP_thresh_2\n 26 thru 32 = 6 3 x AMP_thresh_2\n 32 thru 64 = 7 1 x COH_thresh_1\n 65 thru 96 = 8 2 x COH_thresh_1\n 97 thru 127 = 9 3 x COH_thresh_1\n ''' for tdx in tdx_wam: grass.write_command('r.reclass', input=tdx, output=tdx+'_cats', rules='-', stdin=rule, overwrite=True) #---------------------------------------------------- #---------------------------------------------------- # Import SRTM 01 sec data #---------------------------------------------------- #---------------------------------------------------- # set working directory workDir = '/Volumes/MacintoshHD2/Dropbox/USP/projetosPesquisa/TanDEM-X/gdems/srtm_30m/' os.chdir(workDir) # name the files Araca30 = 'N00W064.hgt' Barcelos30 = 'S01W063.hgt'
'r10ap', 'r15ap', 'r20ap', 'r25ap', 'r30ap', 'r35ap', \ 'r40ap', 'r45ap', 'r50ap', 'r55ap', 'r01ap'] region = ['amazon','qf','andes'] for reg in region: for param in ['aspect_compass','slope']: vect = reg + '_' + param + '_random' print 'maintenance for vector ' + vect for col in colList: # get column from db as a dict col_dict = gvect.vector_db_select(map=vect, columns=col)['values'] # get cats of NULL entries null_list = [int(i[1]) for i in col_dict.values() if i[0]==''] print 'removing NULL entries...' for n in null_list: grass.write_command("db.execute", \ stdin="DELETE FROM %s WHERE cat = %d" % (vect,n)) grass.db_describe(vect) #---------------------------------------------------- # 9.3 - correlation for slope (linear) # files for results os.chdir('/Volumes/HDD/Users/guano/Dropbox/artigos/derivadas_dem/stats') fileOut = open('correlacao_PA_AP_original_slope.txt', 'w') fileOut.write('coeficientes de correlacao - SLOPE \n') colListPA = ['r10pa', 'r15pa', 'r20pa', 'r25pa', 'r30pa', \ 'r35pa', 'r40pa', 'r45pa', 'r50pa', 'r55pa', 'r01pa'] colListAP = ['r10ap', 'r15ap', 'r20ap', 'r25ap', 'r30ap', \
accum1 = accum[:L] accum2 = accum[L:] z1 = z[:L] z2 = z[L:] # And likewise turn small lists into big ones cat2 = np.vstack((cat, cat)) streamSegments = """ # No longer needed #incomplete_basin_cats = list(set(list(cat2[accum <= 0].squeeze()))) """ from contextlib import redirect_stdout # check: http://eli.thegreenplace.net/2015/redirecting-all-kinds-of-stdout-in-python/ xystr = '' for x, y in xy: #print x, y xystr += str(x)+' '+str(y)+'\n' grass.write_command('r.what', map='srtm,flowAccum', output='-', stdin=xystr) """ nsegs_at_point = [] for row in xy: nsegs_at_point.append(np.sum( np.prod(xy == row, axis=1))) nsegs_at_point = np.array(nsegs_at_point)
def main():
    """Build evenly spaced cross sections along a river centerline and
    store the channel width at each section.

    Options used: map (centerline vector), distanza (spacing along the
    line), sponde (banks vector used to clip the sections),
    semilarghezza (half-width of each cross section).
    NOTE(review): uses hard-coded /tmp files and abbreviated GRASS
    parameter names (colum= for column=) — confirm they resolve on the
    target GRASS version.
    """
    reticolo = options["map"]
    distanza = options["distanza"]
    sponde = options["sponde"]
    semilargh = int(options["semilarghezza"])
    # ~ compute the length of the river reach
    lunghezza = float(
        grass.read_command("v.to.db", map=reticolo, option="length", colum="pp", flags="p").split("\n")[1].split("|")[1]
    )
    line_cat = int(
        grass.read_command("v.to.db", map=reticolo, option="length", colum="pp", flags="p").split("\n")[1].split("|")[0]
    )
    # ~ determine the number of steps
    npassi = int(lunghezza / int(distanza))
    # ~ npassi is also the variable counting the actual number of cross sections
    f = open("/tmp/cvs.txt", "w")
    step = int(distanza)
    # ~ create the end points of the cross sections
    # ~ build the input file for v.segment (one point each side of the axis)
    for i in range(npassi):
        f.write("P %s %s %s %s\n" % (i, line_cat, step, semilargh))
        f.write("P %s %s %s -%s\n" % (i, line_cat, step, semilargh))
        step = step + int(distanza)
    f.close()
    grass.run_command("v.segment", input=reticolo, output="punti", overwrite=True, file="/tmp/cvs.txt")
    # ~ create the file of points along the channel axis that will hold the width values (called PUNTI_ASSE)
    f1 = open("/tmp/cvs1.txt", "w")
    step = int(distanza)
    for i in range(npassi):
        f1.write("P %s %s %s\n" % (i, line_cat, step))
        step = step + int(distanza)
    f1.close()
    grass.run_command("v.segment", input=reticolo, output="punti_asse", overwrite=True, file="/tmp/cvs1.txt")
    grass.run_command("v.db.addtable", map="punti_asse", layer="1", columns="cat integer")
    grass.run_command("v.db.addcol", map="punti_asse", layer="1", columns="largh double")
    # ~ os.remove('csv.txt')
    f = open("/tmp/line.txt", "w")
    step = int(distanza)
    # ~ build the input file for v.in.ascii: one 2-point line per section
    for i in range(npassi):
        grass.run_command(
            "v.extract", input="punti", output="punti%d" % (i), layer="1", list=i, new="-1", overwrite=True
        )
        grass.run_command(
            "v.category", input="punti%d" % (i), output="puntidel%d" % (i), option="del", type="point", overwrite=True
        )
        grass.run_command(
            "v.category",
            input="puntidel%d" % (i),
            output="puntiadd%d" % (i),
            option="add",
            type="point",
            overwrite=True,
        )
        # read back the coordinates of the two section end points
        line = grass.read_command("v.to.db", map="puntiadd%d" % (i), option="coor", colum="pp", flags="p")
        xstart = float(line.split("\n")[1].split("|")[1])
        ystart = float(line.split("\n")[1].split("|")[2])
        xend = float(line.split("\n")[2].split("|")[1])
        yend = float(line.split("\n")[2].split("|")[2])
        f.write("L 2 1\n")
        f.write("%s %s\n" % (xstart, ystart))
        f.write("%s %s\n" % (xend, yend))
        f.write("1 %s\n" % (i))
        grass.run_command("g.remove", vect="punti%d,puntidel%d,puntiadd%d" % (i, i, i))
        step = step + int(distanza)
    f.close()
    grass.run_command("v.in.ascii", flags="n", input="/tmp/line.txt", output="linee", overwrite=True, format="standard")
    # ~ prepare the 'linee' vector to accept the widths
    grass.run_command("v.db.addtable", map="linee", layer="1", columns="cat integer")
    # ~ grass.run_command('v.db.addcol', map = 'linee', layer = '1' , columns ='largh double')
    # ~ only the stretch of each section inside the banks must be measured
    grass.run_command(
        "v.overlay", ainput="linee", atype="line", binput=sponde, output="linee_clip", operator="and", overwrite=True
    )
    # ~ compute the lengths of all the clipped sub-segments
    lunghezze = grass.read_command("v.to.db", flags="p", column="pp", map="linee_clip", option="length").split("\n")
    for sezione in range(npassi):
        # all clipped pieces that originated from this cross section
        cat_dup = grass.read_command(
            "db.select", flags="c", table="linee_clip", sql="SELECT cat FROM linee_clip WHERE a_cat=%d" % (sezione)
        ).split("\n")
        cat_dup.remove("")
        lungh_parziali = list()
        for j in cat_dup:
            lungh_parziali.append(float(lunghezze[int(j)].split("|")[1]))
        # the channel width is the longest clipped piece of the section
        grass.write_command(
            "db.execute", stdin="UPDATE punti_asse SET largh=%f WHERE cat=%d" % (max(lungh_parziali), sezione)
        )
def main():
    """Import an external attribute table into the current GRASS database.

    Options used: input (OGR DSN), db_table (table name inside the DSN),
    output (target table name), key (optional new integer key column copied
    from 'cat'), encoding. The table is imported via v.in.ogr as a
    geometry-less vector, then the vector part is removed so only the
    attribute table remains.

    Fix: the Python 2 'file()' builtin (removed in Python 3, and leaking
    the descriptor when db.tables raised) is replaced with 'open()' used
    as a context manager, consistent with the CalledModuleError handling
    this function already relies on.
    """
    input = options['input']
    db_table = options['db_table']
    output = options['output']
    key = options['key']

    mapset = grass.gisenv()['MAPSET']

    if db_table:
        input = db_table

    if not output:
        # derive a legal table name from the input name
        tmpname = input.replace('.', '_')
        output = grass.basename(tmpname)

    # check if table exists
    try:
        # context manager guarantees the devnull handle is closed even
        # when db.tables fails
        with open(os.devnull, 'w+') as nuldev:
            s = grass.read_command('db.tables', flags='p', quiet=True,
                                   stderr=nuldev)
    except CalledModuleError:
        # check connection parameters, set if uninitialized
        grass.read_command('db.connect', flags='c')
        s = grass.read_command('db.tables', flags='p', quiet=True)

    for l in s.splitlines():
        if l == output:
            if grass.overwrite():
                grass.warning(_("Table <%s> already exists and will be "
                                "overwritten") % output)
                grass.write_command('db.execute', input='-',
                                    stdin="DROP TABLE %s" % output)
                break
            else:
                grass.fatal(_("Table <%s> already exists") % output)

    # treat DB as real vector map...
    layer = db_table if db_table else None

    vopts = {}
    if options['encoding']:
        vopts['encoding'] = options['encoding']

    try:
        grass.run_command('v.in.ogr', flags='o', input=input, output=output,
                          layer=layer, quiet=True, **vopts)
    except CalledModuleError:
        if db_table:
            grass.fatal(_("Input table <%s> not found or not readable") % input)
        else:
            grass.fatal(_("Input DSN <%s> not found or not readable") % input)

    # rename ID col if requested from cat to new name
    if key:
        grass.write_command('db.execute', quiet=True, input='-',
                            stdin="ALTER TABLE %s ADD COLUMN %s integer" % (output, key))
        grass.write_command('db.execute', quiet=True, input='-',
                            stdin="UPDATE %s SET %s=cat" % (output, key))

    # ... and immediately drop the empty geometry
    vectfile = grass.find_file(output, element='vector', mapset=mapset)['file']
    if not vectfile:
        grass.fatal(_("Something went wrong. Should not happen"))
    else:
        # remove the vector part
        grass.run_command('v.db.connect', quiet=True, map=output, layer='1',
                          flags='d')
        grass.run_command('g.remove', flags='f', quiet=True, type='vector',
                          name=output)

    # get rid of superfluous auto-added cat column (and cat_ if present)
    with open(os.devnull, 'w+') as nuldev:
        grass.run_command('db.dropcolumn', quiet=True, flags='f', table=output,
                          column='cat', stdout=nuldev, stderr=nuldev)

    records = grass.db_describe(output)['nrows']
    grass.message(_("Imported table <%s> with %d rows") % (output, records))
def main():
    """Access the PostGIS DB and build the heating-network connection layer.

    For every boiler point ('ORI_Chauff'), compares the distance to the
    district-heating network (RCU) and to the road network (with a fixed
    32 m road penalty), creates either a simple straight connection or a
    complex one (voronoi graph + shortest path), merges everything into a
    single 'Reseau_raccord' layer and exports it back to PostGIS.

    Relies on pre-imported vectors (ORI_enveloppebati, ORI_Chauff,
    TMPPP_rsx_RCU, TMPPP_rsx_ROUTES) and a module-level ``clean()`` helper.
    Returns 0 on completion.
    """
    # --- PostGIS connection parameters ---
    # SECURITY NOTE(review): credentials are hard-coded; move them to a
    # config file or environment variables.  LOGIN was scrubbed in the
    # original source ('******').
    BDDNAME = 'saturne'
    IP = '92.222.75.150'
    LOGIN = '******'
    MDP = 'z9pY1Pm6dKaTuNfwMbSj'

    # --- one-time PostGIS imports, kept for reference ---
    # grass.message("Importation du reseau RCU...")
    # SCHEM='topology'; COUCHE='linear'
    # sql="source = 'LIGNES_CONSTRUCTIBLES_2015_05_11' AND id NOT LIKE 'TRONROUT%'"
    # grass.run_command('v.in.ogr', input='PG:dbname=' + BDDNAME + ' host=' + IP + ' port=5432' + ' user=' + LOGIN + ' password=' + MDP + ' sslmode=require' + ' schemas=' + SCHEM, output='ORI_rsxRCU', where=sql, layer=COUCHE, quiet=True)
    # grass.message("Importation du reseau routier...")
    # sql="source = 'LIGNES_CONSTRUCTIBLES_2015_05_11' AND id LIKE 'TRONROUT%'"
    # grass.run_command('v.in.ogr', input='PG:...', output='ORI_rsxRTE', where=sql, layer=COUCHE, quiet=True)
    # ORI_enveloppebati - do not clean its geometry
    # grass.message("Importation des chaufferies...")
    # COUCHE='punctual'; sql="source = 'SAISIE_MANUELLE_DALKIA_FORCITY'"
    # grass.run_command('v.in.ogr', input='PG:...', output='ORI_Chauff', where=sql, layer=COUCHE, quiet=True)

    # --- preparation of working layers ---
    grass.message("Preparation des couches...")
    # building envelopes
    grass.run_command('g.copy', vector=('ORI_enveloppebati', 'TMPPP_bati'),
                      quiet=True)
    # not needed a priori:
    # grass.run_command('v.clean', flags='c', input="ORI_bati", output='TMPPP_bati_full', tool='break', quiet=True)
    # grass.run_command('v.clean', flags='c', input="ORI_rsxRCU", output='TMPPP_rsx_RCU', tool='break', quiet=True)
    # grass.run_command('v.clean', flags='c', input="ORI_rsxRTE", output='TMPPP_rsx_ROUTES', tool='break', quiet=True)
    grass.run_command('g.copy', vector=('ORI_Chauff', 'TMPP_Chauff'),
                      quiet=True)

    # --- distances from every boiler to buildings and both networks ---
    grass.message("calcul des distances...")
    grass.run_command('v.db.addcolumn', map='TMPP_Chauff',
                      columns='BATIID varchar(254),RSXRCU DOUBLE PRECISION,'
                              'RSXRTETP DOUBLE PRECISION,'
                              'RSXRTE DOUBLE PRECISION,'
                              'DISTBATI DOUBLE PRECISION',
                      quiet=True)
    # BUGFIX: v.distance's parameter is 'to_column' (was misspelled 'to_colum')
    grass.read_command('v.distance', _from='TMPP_Chauff', from_type='point',
                       to='TMPPP_bati', to_type='area', upload='to_attr',
                       to_column='id', column='BATIID', quiet=True)
    grass.read_command('v.distance', _from='TMPP_Chauff', from_type='point',
                       to='TMPPP_bati', to_type='area', upload='dist',
                       column='DISTBATI', quiet=True)
    grass.read_command('v.distance', _from='TMPP_Chauff', from_type='point',
                       to='TMPPP_rsx_RCU', to_type='line', upload='dist',
                       column='RSXRCU', quiet=True)
    grass.read_command('v.distance', _from='TMPP_Chauff', from_type='point',
                       to='TMPPP_rsx_ROUTES', to_type='line', upload='dist',
                       column='RSXRTETP', quiet=True)
    # road distance carries a fixed 32 m penalty before the comparison
    grass.run_command('v.db.update', map='TMPP_Chauff', column='RSXRTE',
                      query_column='RSXRTETP+32', quiet=True)
    grass.run_command('v.db.dropcolumn', map='TMPP_Chauff',
                      columns='RSXRTETP', quiet=True)

    # --- split boilers by nearest network (RCU vs road) ---
    ListChauff = []
    grass.message("extraction...")
    expr = 'RSXRCU<RSXRTE'
    grass.run_command('v.extract', input='TMPP_Chauff',
                      output='ORI_Chauff_RCU', where=expr, quiet=True)
    grass.run_command('v.db.renamecolumn', map='ORI_Chauff_RCU',
                      column=('RSXRCU', 'DISTRSX'), quiet=True)
    grass.run_command('v.db.dropcolumn', map='ORI_Chauff_RCU',
                      columns='RSXRTE', quiet=True)
    infopt = grass.vector_info_topo('ORI_Chauff_RCU')
    if infopt['points'] > 0:
        ListChauff.append('ORI_Chauff_RCU')
    grass.run_command('v.extract', flags='r', input='TMPP_Chauff',
                      output='ORI_Chauff_RTE', where=expr, quiet=True)
    grass.run_command('v.db.renamecolumn', map='ORI_Chauff_RTE',
                      column=('RSXRTE', 'DISTRSX'), quiet=True)
    grass.run_command('v.db.dropcolumn', map='ORI_Chauff_RTE',
                      columns='RSXRCU', quiet=True)
    infopt = grass.vector_info_topo('ORI_Chauff_RTE')
    if infopt['points'] > 0:
        ListChauff.append('ORI_Chauff_RTE')

    # --- simple connections: building farther than the network ---
    grass.message("finalisation...")
    for layer in ListChauff:
        expr = 'DISTBATI>DISTRSX'
        grass.run_command('v.extract', input=layer,
                          output='TMPP_NetSimple_' + layer, where=expr,
                          quiet=True)
        grass.run_command('v.extract', flags='r', input=layer,
                          output='TMPPP_NetProcess_' + layer, where=expr,
                          quiet=True)
    infopt = grass.vector_info_topo('TMPP_NetSimple_ORI_Chauff_RCU')
    if infopt['points'] > 0:
        grass.read_command('v.distance', flags='p',
                           _from='TMPP_NetSimple_ORI_Chauff_RCU',
                           from_type='point', to='TMPPP_rsx_RCU',
                           to_type='line', upload='dist',
                           output='TP_NetSimple_ORI_Chauff_CONNECT_RCU',
                           quiet=True)
        grass.run_command('v.db.addtable',
                          map='TP_NetSimple_ORI_Chauff_CONNECT_RCU',
                          columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                          quiet=True)
        grass.run_command('v.db.update',
                          map='TP_NetSimple_ORI_Chauff_CONNECT_RCU',
                          column='source', value='RACCsimpleRCU_FORCITY',
                          quiet=True)
    infopt = grass.vector_info_topo('TMPP_NetSimple_ORI_Chauff_RTE')
    if infopt['points'] > 0:
        grass.read_command('v.distance', flags='p',
                           _from='TMPP_NetSimple_ORI_Chauff_RTE',
                           from_type='point', to='TMPPP_rsx_ROUTES',
                           to_type='line', upload='dist',
                           output='TP_NetSimple_ORI_Chauff_CONNECT_RTE',
                           quiet=True)
        grass.run_command('v.db.addtable',
                          map='TP_NetSimple_ORI_Chauff_CONNECT_RTE',
                          columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                          quiet=True)
        grass.run_command('v.db.update',
                          map='TP_NetSimple_ORI_Chauff_CONNECT_RTE',
                          column='source', value='RACCsimpleRTE_FORCITY',
                          quiet=True)

    # merge all simple connections into one layer
    dvectNetSimple = grass.parse_command('g.list', type="vect",
                                         pattern="TP_NetSimple_ORI_Chauff_CONNECT_*",
                                         quiet=True)
    grass.run_command('v.edit', tool='create',
                      map='TP_Raccord_NetSimple_Finale', quiet=True)
    grass.run_command('v.db.addtable', map='TP_Raccord_NetSimple_Finale',
                      columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                      quiet=True)
    for vct in dvectNetSimple:
        grass.run_command('v.patch', flags='ea', input=vct,
                          output='TP_Raccord_NetSimple_Finale', quiet=True,
                          overwrite=True)
    grass.run_command('v.db.update', map='TP_Raccord_NetSimple_Finale',
                      column='source', value='RACC_FORCITY', quiet=True)
    grass.run_command('v.db.update', map='TP_Raccord_NetSimple_Finale',
                      column='id', value='RACC_DIRECT', quiet=True)
    # NOTE: renamed from 'tempfile' to avoid shadowing the stdlib module
    temp_patterns = ['TMPP_*']
    clean(temp_patterns)

    # --- complex connections: voronoi graph + shortest path per boiler ---
    listerrorRCU = []
    listerrorRTE = []
    grass.message("Preparation des couches...")
    grass.run_command('v.patch', flags='e',
                      input=('TMPPP_NetProcess_ORI_Chauff_RCU',
                             'TMPPP_NetProcess_ORI_Chauff_RTE'),
                      output='TMPPP_NetProcess', quiet=True)
    grass.run_command('v.db.addcolumn', map='TMPPP_NetProcess',
                      columns='NUM INTEGER', quiet=True)
    # renumber the points 1..n into NUM so each can be extracted in turn
    valuecat = grass.parse_command('v.category', input='TMPPP_NetProcess',
                                   type='point', option='print')
    newcat = 1
    for f in valuecat:
        grass.write_command("db.execute", input="-",
                            stdin="update TMPPP_NetProcess SET NUM='{0}' WHERE cat='{1}'".format(newcat, str(f)),
                            quiet=True)
        newcat += 1
    grass.run_command('v.edit', tool='create',
                      map='TP_Raccord_NetProcess_Finale', quiet=True)
    grass.run_command('v.db.addtable', map='TP_Raccord_NetProcess_Finale',
                      columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                      quiet=True)
    infopt = grass.vector_info_topo('TMPPP_NetProcess')
    objpt = 1
    while objpt <= infopt['points']:
        grass.message("\nPoint N. {0} / {1}".format(objpt, infopt['points']))
        grass.message("extraction des chaufferies...")
        expr = 'NUM=' + str(objpt)
        grass.run_command('v.extract', input='TMPPP_NetProcess',
                          output='TMP_NetProcess_pt' + str(objpt),
                          where=expr, quiet=True)
        grass.message("extraction du batiment correspondant et retraitement...")
        catbati = grass.read_command('v.db.select', flags='cv',
                                     map='TMP_NetProcess_pt' + str(objpt),
                                     columns='BATIID', quiet=True)
        attbati = catbati.split()[0]
        expr = "id='" + str(attbati) + "'"
        # grass.run_command('v.extract', input='TMPPP_bati', output='TMP_NetProcess_bati_extract'+str(objpt), where=expr, quiet=True)
        grass.run_command('v.extract', input='TMPPP_bati',
                          output='TMP_NetProcess_bati_ext' + str(objpt),
                          where=expr, quiet=True)
        grass.run_command('v.centroids',
                          input='TMP_NetProcess_bati_ext' + str(objpt),
                          output='TMP_NetProcess_bati_ext_centro' + str(objpt),
                          quiet=True)
        grass.run_command('v.category',
                          input='TMP_NetProcess_bati_ext_centro' + str(objpt),
                          output='TMP_NetProcess_bati_ext_centro_catego' + str(objpt),
                          option='add', quiet=True)
        grass.run_command('v.extract', flags='d',
                          input='TMP_NetProcess_bati_ext_centro_catego' + str(objpt),
                          output='TMP_NetProcess_bati_extract' + str(objpt),
                          new='1', quiet=True)
        grass.message("extraction des coordonnees du batiment correspondant...")
        coordbati = grass.read_command('v.info', flags='g',
                                       map='TMP_NetProcess_bati_extract' + str(objpt),
                                       quiet=True)
        # parse the 'key=value' output of v.info -g into a dict
        dictcoordbati = dict((n, str(v)) for n, v in
                             (a.split('=') for a in coordbati.split()))
        grass.message("calcul de la distance du reseau le plus proche...")
        distrsxRTE = grass.read_command('v.distance', flags='p',
                                        _from='TMP_NetProcess_pt' + str(objpt),
                                        from_type='point',
                                        to='TMPPP_rsx_ROUTES', to_type='line',
                                        upload='dist', quiet=True)
        distancersxRTE = []
        for i in distrsxRTE.split():
            distancersxRTE.append(i.split('|'))
        distancersxRTE.remove(distancersxRTE[0])  # drop the header row
        distanceRTE = ceil(float(distancersxRTE[0][1]))
        distrsxRCU = grass.read_command('v.distance', flags='p',
                                        _from='TMP_NetProcess_pt' + str(objpt),
                                        from_type='point',
                                        to='TMPPP_rsx_RCU', to_type='line',
                                        upload='dist', quiet=True)
        distancersxRCU = []
        for i in distrsxRCU.split():
            distancersxRCU.append(i.split('|'))
        distancersxRCU.remove(distancersxRCU[0])
        distanceRCU = ceil(float(distancersxRCU[0][1]))
        # Pick the closer network; the 32 m margin absorbs offsets and makes
        # sure the network still falls inside the computed region below.
        if (distanceRCU <= distanceRTE + 32):
            distance = distanceRCU + 32
        else:
            distance = distanceRTE + 32
        grass.message("calage de la region sur l'emprise du batiment correspondant...")
        grass.run_command('g.region', flags='a',
                          n=float(dictcoordbati['north']) + distance,
                          s=float(dictcoordbati['south']) - distance,
                          e=float(dictcoordbati['east']) + distance,
                          w=float(dictcoordbati['west']) - distance,
                          quiet=True)
        grass.run_command('v.in.region',
                          output='TMP_NetProcess_region_L' + str(objpt),
                          quiet=True)
        grass.message("extraction du reseau sur la region de travail...")
        if (distanceRCU <= distanceRTE + 32):
            grass.message("RCU - conversion ligne du reseau vers point et calcul du cout...")
            grass.run_command('v.overlay', flags='t', ainput='TMPPP_rsx_RCU',
                              atype='line',
                              binput='TMP_NetProcess_region_L' + str(objpt),
                              output='TMP_NetProcess_rsx_RCU' + str(objpt),
                              operator='and', quiet=True)
            grass.run_command('v.to.points',
                              input='TMP_NetProcess_rsx_RCU' + str(objpt),
                              output='TMP_NetProcess_rsx_RCU_pt' + str(objpt),
                              dmax='1', quiet=True)
            grass.run_command('g.copy',
                              vector=('TMP_NetProcess_rsx_RCU_pt' + str(objpt),
                                      'TMP_NetProcess_rsx_pt' + str(objpt)),
                              quiet=True)
            typeracc = 'RCU'
        else:
            grass.message("ROUTES - conversion ligne du reseau vers point et calcul du cout...")
            grass.run_command('v.overlay', flags='t',
                              ainput='TMPPP_rsx_ROUTES', atype='line',
                              binput='TMP_NetProcess_region_L' + str(objpt),
                              output='TMP_NetProcess_rsx_ROUTES' + str(objpt),
                              operator='and', quiet=True)
            grass.run_command('v.to.points',
                              input='TMP_NetProcess_rsx_ROUTES' + str(objpt),
                              output='TMP_NetProcess_rsx_ROUTES_pt' + str(objpt),
                              dmax='1', quiet=True)
            grass.run_command('g.copy',
                              vector=('TMP_NetProcess_rsx_ROUTES_pt' + str(objpt),
                                      'TMP_NetProcess_rsx_pt' + str(objpt)),
                              quiet=True)
            typeracc = 'ROUTES'
        grass.message("assemblage des bati point et du reseau point pour un maillage de point complet...")
        grass.run_command('v.to.points',
                          input='TMP_NetProcess_bati_extract' + str(objpt),
                          output='TMP_NetProcess_region_bati_pt' + str(objpt),
                          type='area', dmax='1', quiet=True)
        grass.run_command('v.patch',
                          input=('TMP_NetProcess_region_bati_pt' + str(objpt),
                                 'TMP_NetProcess_rsx_pt' + str(objpt)),
                          output='TMP_NetProcess_pt_bati_rsx' + str(objpt),
                          quiet=True)
        grass.message("creation du diagramme de voronoi...")
        grass.run_command('v.voronoi', flags='l',
                          input='TMP_NetProcess_pt_bati_rsx' + str(objpt),
                          output='TMP_NetProcess_pt_bati_rsx_voro' + str(objpt),
                          quiet=True)
        grass.message("suppression des lignes du voronoi a linterieur de chaque bati...")
        grass.run_command('v.overlay', flags='t',
                          ainput='TMP_NetProcess_pt_bati_rsx_voro' + str(objpt),
                          atype='line',
                          binput='TMP_NetProcess_bati_extract' + str(objpt),
                          output='TMP_NetProcess_voroNot_' + str(objpt),
                          operator='not', quiet=True)
        grass.message("prise en compte des autres batiments...")
        grass.run_command('v.select', ainput='TMPPP_bati', atype='area',
                          binput="TMP_NetProcess_region_L" + str(objpt),
                          btype='area',
                          output='TMP_NetProcess_bati_select' + str(objpt),
                          operator='overlap', quiet=True)
        # grass.run_command('v.clean', flags='c', input="TMP_NetProcess_bati_select"+str(objpt), output='TMP_NetProcess_bati_select_cl'+str(objpt), tool='snap,break,bpol', type='boundary', threshold='1', quiet=True)
        # fusion: drop the current building from the neighbours layer
        grass.run_command('v.extract', flags='r',
                          input='TMP_NetProcess_bati_select' + str(objpt),
                          output='TMP_NetProcess_bati_select_cl_buff_fusio' + str(objpt),
                          where=expr, quiet=True)
        # grass.run_command('v.extract', flags='d', input='TMP_NetProcess_bati_select_cl_buff_ext'+str(objpt), output='TMP_NetProcess_bati_select_cl_buff_fusio'+str(objpt), new='1', quiet=True)
        grass.message("suppression graph voro dans autre bati...")
        grass.run_command('v.overlay', flags='t',
                          ainput='TMP_NetProcess_voroNot_' + str(objpt),
                          atype='line',
                          binput='TMP_NetProcess_bati_select_cl_buff_fusio' + str(objpt),
                          output='TMP_NetProcess_voroNot_bis' + str(objpt),
                          operator='not', quiet=True)
        grass.message("conversion du bati en ligne avec voro pour integration dans la couche voronoi et nettoyage...")
        grass.run_command('v.type',
                          input='TMP_NetProcess_bati_extract' + str(objpt),
                          output='TMP_NetProcess_region_bati_buff_line' + str(objpt),
                          from_type='boundary', to_type='line', quiet=True)
        # integrate the other buildings as lines too
        grass.run_command('v.type',
                          input='TMP_NetProcess_bati_select_cl_buff_fusio' + str(objpt),
                          output='TMP_NetProcess_bati_select_cl_buff_line' + str(objpt),
                          from_type='boundary', to_type='line', quiet=True)
        # keep no lines inside the rebuilt building polygon
        grass.run_command('v.overlay', flags='t',
                          ainput='TMP_NetProcess_bati_select_cl_buff_line' + str(objpt),
                          atype='line',
                          binput='TMP_NetProcess_bati_extract' + str(objpt),
                          output='TMP_NetProcess_bati_select_cl_buff_line_not' + str(objpt),
                          operator='not', quiet=True)
        # integrate the connections already computed for previous points
        inforacc = grass.vector_info_topo('TP_Raccord_NetProcess_Finale')
        if inforacc['lines'] == 0:
            grass.run_command('v.patch',
                              input=('TMP_NetProcess_voroNot_bis' + str(objpt),
                                     'TMP_NetProcess_region_bati_buff_line' + str(objpt),
                                     'TMP_NetProcess_bati_select_cl_buff_line_not' + str(objpt)),
                              output='TMP_NetProcess_voroNot_bati_line' + str(objpt),
                              quiet=True)
        else:
            grass.run_command('v.extract',
                              input='TP_Raccord_NetProcess_Finale',
                              type='line',
                              output='TMP_NetProcess_Raccord' + str(objpt),
                              quiet=True)
            grass.run_command('v.patch',
                              input=('TMP_NetProcess_voroNot_bis' + str(objpt),
                                     'TMP_NetProcess_region_bati_buff_line' + str(objpt),
                                     'TMP_NetProcess_Raccord' + str(objpt),
                                     'TMP_NetProcess_bati_select_cl_buff_line_not' + str(objpt)),
                              output='TMP_NetProcess_voroNot_bati_line' + str(objpt),
                              quiet=True)
        grass.run_command('v.clean', flags='c',
                          input="TMP_NetProcess_voroNot_bati_line" + str(objpt),
                          output='TMP_NetProcess_voroNot_bati_line_cl' + str(objpt),
                          tool='snap,break', type='area', threshold='0.1',
                          quiet=True)
        # connect the boiler point to the graph (node layer 2)
        grass.message("generation du 1er reseau...")
        grass.run_command('v.net', flags='c',
                          input='TMP_NetProcess_voroNot_bati_line_cl' + str(objpt),
                          points='TMP_NetProcess_pt' + str(objpt),
                          output='TMP_NetProcess_voroNot_bati_line_cl_PT_cnct_' + str(objpt),
                          operation='connect', threshold='10000', quiet=True)
        grass.run_command('v.clean', flags='c',
                          input="TMP_NetProcess_voroNot_bati_line_cl_PT_cnct_" + str(objpt),
                          output='TMP_NetProcess_voroNot_bati_line_cl_PT_cnct_cl' + str(objpt),
                          tool='break', quiet=True)
        # connect the network points on a separate node layer (3)
        grass.message("generation du 2nd reseau...")
        grass.run_command('v.net', flags='c',
                          input='TMP_NetProcess_voroNot_bati_line_cl_PT_cnct_cl' + str(objpt),
                          points='TMP_NetProcess_rsx_pt' + str(objpt),
                          output='TMP_NetProcess_voroNot_bati_line_cl_PT_cnct_cl_rsx_cnct' + str(objpt),
                          operation='connect', node_layer='3',
                          threshold='10000', quiet=True)
        grass.message("nettoyage du reseau...")
        grass.run_command('v.clean', flags='c',
                          input="TMP_NetProcess_voroNot_bati_line_cl_PT_cnct_cl_rsx_cnct" + str(objpt),
                          output='TMP_NetProcess_voroNot_bati_line_cl_PT_cnct_cl_rsx_cnct_cl' + str(objpt),
                          tool='snap,break', threshold='0.1', quiet=True)
        # shortest path from the boiler (layer 2) to the network (layer 3);
        # no per-line cost needed a priori
        grass.message("Calcul du chemin le plus court...")
        grass.run_command('v.net.distance',
                          input='TMP_NetProcess_voroNot_bati_line_cl_PT_cnct_cl_rsx_cnct_cl' + str(objpt),
                          output='TMP_NetProcess_' + str(objpt),
                          from_layer='2', to_layer='3', quiet=True)
        grass.message("Remplissage des attributs...")
        grass.run_command('v.db.droptable', flags='f',
                          map='TMP_NetProcess_' + str(objpt), quiet=True)
        grass.run_command('v.db.addtable', map='TMP_NetProcess_' + str(objpt),
                          columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                          quiet=True)
        grass.run_command('v.db.update', map='TMP_NetProcess_' + str(objpt),
                          column='source', value='RACC_FORCITY' + str(objpt),
                          quiet=True)
        grass.run_command('v.db.update', map='TMP_NetProcess_' + str(objpt),
                          column='id', value='RACC_OK_' + typeracc,
                          quiet=True)
        grass.run_command('v.patch', flags='ea',
                          input='TMP_NetProcess_' + str(objpt),
                          output='TP_Raccord_NetProcess_Finale', quiet=True,
                          overwrite=True)
        # remember points for which no path could be computed
        infoPB = grass.vector_info_topo('TMP_NetProcess_' + str(objpt))
        if infoPB['lines'] == 0:
            if typeracc == 'RCU':
                listerrorRCU.append(str(objpt))
            elif typeracc == 'ROUTES':
                listerrorRTE.append(str(objpt))
        grass.message("problemes : {0}".format(listerrorRCU))
        grass.message("problemes : {0}".format(listerrorRTE))
        # clean the per-point temporary maps
        temp_patterns = ['TMP_*']
        clean(temp_patterns)
        objpt += 1

    # --- fall back to straight connections for the problematic boilers ---
    grass.message("traitements des erreurs")
    op = ' OR '
    if listerrorRCU:
        errorRCU = []
        for f in listerrorRCU:
            expr = 'NUM=' + str(f)
            errorRCU.append(expr + op)
        # join the clauses and strip the trailing ' OR ' (4 characters)
        grass.run_command('v.extract', input='TMPPP_NetProcess',
                          output='TMPPP_NetProcess_PB_RCU',
                          where=''.join(errorRCU)[:-4], quiet=True)
        grass.read_command('v.distance', flags='p',
                           _from='TMPPP_NetProcess_PB_RCU', from_type='point',
                           to='TMPPP_rsx_RCU', to_type='line', upload='dist',
                           output='TP_NetProcess_PB_CONNECT_RCU', quiet=True)
        grass.run_command('v.db.addtable', map='TP_NetProcess_PB_CONNECT_RCU',
                          columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                          quiet=True)
        grass.run_command('v.db.update', map='TP_NetProcess_PB_CONNECT_RCU',
                          column='source', value='RACC_FORCITY', quiet=True)
        grass.run_command('v.db.update', map='TP_NetProcess_PB_CONNECT_RCU',
                          column='id', value='RACC_RCU_AREVOIR', quiet=True)
    if listerrorRTE:
        errorRTE = []
        for f in listerrorRTE:
            expr = 'NUM=' + str(f)
            errorRTE.append(expr + op)
        grass.run_command('v.extract', input='TMPPP_NetProcess',
                          output='TMPPP_NetProcess_PB_RTE',
                          where=''.join(errorRTE)[:-4], quiet=True)
        grass.read_command('v.distance', flags='p',
                           _from='TMPPP_NetProcess_PB_RTE', from_type='point',
                           to='TMPPP_rsx_ROUTES', to_type='line',
                           upload='dist',
                           output='TP_NetProcess_PB_CONNECT_RTE', quiet=True)
        grass.run_command('v.db.addtable', map='TP_NetProcess_PB_CONNECT_RTE',
                          columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                          quiet=True)
        grass.run_command('v.db.update', map='TP_NetProcess_PB_CONNECT_RTE',
                          column='source', value='RACC_FORCITY', quiet=True)
        grass.run_command('v.db.update', map='TP_NetProcess_PB_CONNECT_RTE',
                          column='id', value='RACC_RTE_AREVOIR', quiet=True)
    vectraccordPB = grass.parse_command('g.list', type="vect",
                                        pattern="TP_NetProcess_PB_CONNECT_*",
                                        quiet=True)
    grass.run_command('v.edit', tool='create', map='TP_Raccord_NetPB_Finale',
                      quiet=True)
    grass.run_command('v.db.addtable', map='TP_Raccord_NetPB_Finale',
                      columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                      quiet=True)
    for vect in vectraccordPB:
        grass.run_command('v.patch', flags='ea', input=vect,
                          output='TP_Raccord_NetPB_Finale', quiet=True,
                          overwrite=True)

    # --- merge every connection layer into one final layer ---
    vectraccordFIN = grass.parse_command('g.list', type="vect",
                                         pattern="TP_Raccord_Net*",
                                         quiet=True)
    grass.run_command('v.edit', tool='create', map='TP_Raccord_Finale',
                      quiet=True)
    grass.run_command('v.db.addtable', map='TP_Raccord_Finale',
                      columns="source varchar(255), id varchar(255), valid_time varchar(255)",
                      quiet=True)
    for vect in vectraccordFIN:
        grass.run_command('v.patch', flags='ea', input=vect,
                          output='TP_Raccord_Finale', quiet=True,
                          overwrite=True)

    # --- post-processing: connect, clean, generalize ---
    grass.run_command('v.net', input='TP_Raccord_Finale',
                      points='TMPP_Chauff', output='TP_Raccord_Finale_connect',
                      operation='connect', thresh='0.1', quiet=True)
    grass.run_command('v.clean', input='TP_Raccord_Finale_connect',
                      output='TP_Raccord_Finale_connect_clean',
                      tool='break,snap', thresh='0,0.1', quiet=True)
    grass.run_command('v.generalize', flags='c',
                      input='TP_Raccord_Finale_connect_clean',
                      output='TP_Raccord_Finale_connect_clean_gene',
                      method='lang', threshold='1', quiet=True)
    grass.run_command('v.clean', input='TP_Raccord_Finale_connect_clean_gene',
                      output='TP_Raccord_Finale_connect_clean_gene_clean',
                      tool='break,snap,rmdangle', thresh='0,0.1', quiet=True)
    grass.run_command('v.db.dropcolumn',
                      map='TP_Raccord_Finale_connect_clean_gene_clean',
                      columns='cat_', quiet=True)
    grass.run_command('v.patch', flags='ea',
                      input=('TMPPP_rsx_RCU', 'TMPPP_rsx_ROUTES'),
                      output='TP_Reseau', quiet=True, overwrite=True)
    grass.run_command('v.patch',
                      input=('TP_Raccord_Finale_connect_clean_gene_clean',
                             'TP_Reseau'),
                      output='TP_Reseau_RCU_complet', quiet=True)
    grass.run_command('v.clean', input='TP_Reseau_RCU_complet',
                      output='Reseau_raccord', type='point,line',
                      tool='snap,break,rmdangle,rmdupl,rmsa', thresh='0.1',
                      quiet=True)

    # --- export back to PostGIS ---
    SCHEM = 'public'
    # BUGFIX: the DSN string was syntactically broken by credential
    # scrubbing; rebuilt from the LOGIN/MDP variables declared above.
    grass.run_command('v.out.ogr', input='Reseau_raccord', type='line',
                      output='PG:dbname=' + BDDNAME + ' host=' + IP +
                             ' port=5432' + ' user=' + LOGIN +
                             ' password=' + MDP + ' sslmode=require' +
                             ' schemas=' + SCHEM,
                      output_layer='Reseau_raccord', format='PostgreSQL')
    return 0
def calculateMaps(frame,nomi,abbreviation,coordDict,land,start,end,outputGates,outputGrid): all=[] for i in nomi: all.append(i.split(' > ')) for i in all: for g in i: if g=='': all[all.index(i)][i.index(g)]='EXIT' elif g=='VIEUX MOINES': all[all.index(i)][i.index(g)]='LES_VIEUX_MOINES' elif g=='CONQUET': all[all.index(i)][i.index(g)]='LE_CONQUET' elif g=='FOUR': all[all.index(i)][i.index(g)]='LE_FOUR' elif g=='SUD': all[all.index(i)][i.index(g)]='SUD_'+abbreviation elif g=='OUEST': all[all.index(i)][i.index(g)]='OUEST_'+abbreviation elif g=='NORD': all[all.index(i)][i.index(g)]='NORD_'+abbreviation elif g=='NORD_OUEST': all[all.index(i)][i.index(g)]='NORD_OUEST_'+abbreviation elif g==' CAMARET': all[all.index(i)][i.index(g)]=g.split(' ')[1] all2=[] for i in all: tmp=[] for g in i: tmp.append(g.replace(' ','_')) all2.append(tmp) for i in all2: for g in i: if g=='NORD_OUEST': all2[all2.index(i)][i.index(g)]='NORD_OUEST_'+abbreviation elif g=='TOULINGUET_': all2[all2.index(i)][i.index(g)]='TOULINGUET' fAll=[] for i in all2: for g in i: fAll.append(g) postiOc=list(OrderedDict.fromkeys(fAll)) for i in postiOc: if i=='': postiOc.pop(postiOc.index(i)) elif i=='VIEUX MOINES': postiOc[postiOc.index(i)]='LES_VIEUX_MOINES' elif i=='CONQUET': postiOc[postiOc.index(i)]='LE_CONQUET' elif i=='FOUR': postiOc[postiOc.index(i)]='LE_FOUR' elif i=='SUD': postiOc[postiOc.index(i)]='SUD_'+abbreviation elif i=='OUEST': postiOc[postiOc.index(i)]='OUEST_'+abbreviation elif i=='NORD': postiOc[postiOc.index(i)]='NORD_'+abbreviation elif i=='NORD_OUEST': postiOc[postiOc.index(i)]='NORD_OUEST_'+abbreviation for i in postiOc: if i==' CAMARET': postiOc[postiOc.index(i)]=i.split(' ')[1] postiOk=list(OrderedDict.fromkeys(postiOc)) ab=[] for i in postiOk: ab.append(i.replace(' ','_')) for i in ab: if i=='TOULINGUET_': ab[ab.index(i)]='TOULINGUET' postiOk=list(OrderedDict.fromkeys(ab)) s=open(coordDict,'r') c=s.readlines() coordD=[] tc=open('tmpCoord.csv','w') for i in postiOk: for g in c: if 
re.search(i,g): tc.writelines(g) coordD.append(g) tc.close() grass.run_command('g.gisenv',set='OVERWRITE=1') grass.run_command('v.in.ascii',input='tmpCoord.csv',x=2,y=3,flags='n',separator='|',output='gates') grass.run_command('g.region',vect='gates',n='n+1000',s='s-1000',e='e+1000',w='w-1000') grass.run_command('g.region',save='gates') lin1=grass.read_command('g.region',flags='pgc') north=float((lin1.split('\n')[0]).split('=')[1]) south=float((lin1.split('\n')[1]).split('=')[1]) west=float((lin1.split('\n')[2]).split('=')[1]) east=float((lin1.split('\n')[3]).split('=')[1]) eastc=float((lin1.split('\n')[-3]).split('=')[1]) northc=float((lin1.split('\n')[-2]).split('=')[1]) res=2000 he=round((north-south)/res)+1 wi=round((east-west)/res)+1 ncells=str(he)+','+str(wi) posit=str(west)+','+str(south) box1=str(res)+','+str(res) grass.run_command('v.mkgrid',map='grid1',grid=ncells,position='coor',coor=posit,box=box1) l=math.sqrt(2*res**2) px=round(((wi-1)/2),0) py=round(((he+1)/2)) if px % 2 == 0: addx=px else: addx=px+1 if py % 2 == 0: addy=py else: addy=py+1 posit2=str(west-(addx*res)+res*(wi))+','+str(south+(addy*res)-res*(he)) box2=str(l)+","+str(l) ncells2=str(he+1)+','+str(wi-1) grass.run_command('v.mkgrid',map='grid2',grid=ncells2,position='coor',coor=posit2,box=box2,angle=45) grass.run_command('v.type',input='grid1',output='grid1l',from_type='boundary',to_type='line') grass.run_command('v.type',input='grid2',output='grid2l',from_type='boundary',to_type='line') grass.run_command('v.patch',input='grid1l,grid2l',output='gridl') grass.run_command('v.overlay',ainput='gridl',binput=land,atype='line',btype='area',operator='not',output='gridOk') grass.write_command('r.mapcalc', stdin = "%s = 1" % ('region1')) grass.run_command('r.to.vect',input='region1',output='region1',type='area') grass.run_command('v.overlay',ainput='gridOk',binput='region1',atype='line',btype='area',operator='and',output='gridRegion') 
grass.run_command('v.clean',input='gridRegion',type='line',output='gridPruned',tool='rmdangle',thresh='400') mdist=(l/2)/(math.cos(math.radians(22.5))) grass.run_command('v.net',input='gridPruned',points='gates',output='gridPoints',operation='connect',thresh=mdist+1) grass.run_command('v.clean',input='gridPoints',type='line',output='gridSegments',tool='break') grass.run_command('v.db.droptable',map='gridSegments',flags='f') grass.run_command('v.category',input='gridSegments',output='gridNocat',option='del',cat='-1') grass.run_command('v.category',input='gridNocat',output='gridCat',option='add') grass.run_command('v.db.addtable',map='gridCat',columns='cat integer,npass integer,nameboat varchar(30)') grass.run_command('v.db.addcolumn',map='gates',columns='npass integer,nameboat varchar(30)') grass.run_command('v.db.update',map='gates',column='npass',value='0') grass.run_command('v.db.update',map='gridCat',column='npass',value='0') #create or connect to the temporal databse - vedi un try/except # grass.run_command('t.create',output='traffic',type='stvds',title='request',description='request') #grass.run_command('t.connect',database='traffic') ind=0 for i in all2: if len(i) == 1: ind += 1 gate=i[0] ngates=int((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate))).split('|')[-2]) ngates=ngates+1 grass.run_command('v.db.update',map='gates',column='npass',value=ngates,where='str_1="%s"' % (gate)) elif len(i) == 2: ind += 1 gate1=i[0] gate2=i[1] g1E=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate1))).split('|')[2]) g1N=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate1))).split('|')[3]) g2E=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate2))).split('|')[2]) g2N=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate2))).split('|')[3]) 
p=open('tmpCoord2','w') p.writelines("1 "+str(g1E)+" "+str(g1N)+" "+str(g2E)+" "+str(g2N)) p.close() #l'informazione temporale va messa su path2 e path3 #che vanno in qualche modo salvate TUTTE (inventarsi qualcosa per i nomi) per poter visualizzare tutto insieme #e quando esistono sia un path2 che un path3 va fatto un vettoriale unico che abbia la durata dell'evento in questione grass.run_command('v.net.path',input='gridCat',output='path_'+str(ind),file='tmpCoord2',flags='s') grass.run_command('v.db.droptable',map='path_'+str(ind),flags='f') grass.run_command('v.category',input='path_'+str(ind),output='tmpPath4a',option='del') grass.run_command('v.category',input='tmpPath4a',output='path_'+str(ind),option='add') grass.run_command('v.db.addtable',map='path_'+str(ind)) alls,alle,partcoor,listcoor,listcat=[],[],[],[],[] alls=(grass.read_command('v.to.db',map='path_'+str(ind),type='line',option='start',flags='p')).split('\n')[1:-1] alle=(grass.read_command('v.to.db',map='path_'+str(ind),type='line',option='end',flags='p')).split('\n')[1:-1] #register the map in the stvds trafic startP=start[ind-1] endP=end[ind-1] # grass.run_command('t.register',input='traffic',maps='path_'+str(ind),type='vect',start=startP,end=endP) allse=[] e=0 for i in alls: allse.append(i) allse.append(alle[e]) e=e+1 s=0 for i in range(0,len(allse)): try: if s==0: sx1=round(float(allse[s].split('|')[1]),0) sy1=round(float(allse[s].split('|')[2]),0) s=s+1 else: sx2=round(float(allse[s].split('|')[1]),0) sy2=round(float(allse[s].split('|')[2]),0) if sx1==sx2 and sy1==sy2: allse.pop(s) else: sx1,sy1=sx2,sy2 s=s+1 except IndexError: break s=0 for i in allse: if s==0: sx1=round(float(allse[s].split('|')[1]),4) sy1=round(float(allse[s].split('|')[2]),4) s=s+1 else: sx2=round(float(allse[s].split('|')[1]),4) sy2=round(float(allse[s].split('|')[2]),4) if sx1==sx2 and sy1==sy2: allse.pop(s) s=s+1 else: sx1,sy1=sx2,sy2 s=s+1 for i in range(0,len(allse)-1): 
listcoor.append(str((float(allse[i].split('|')[1])+float(allse[i+1].split('|')[1]))/2)+","+str((float(allse[i].split('|')[2])+float(allse[i+1].split('|')[2]))/2)) for i in listcoor: cat=int((grass.read_command('v.what',map='gridCat',type='line',coordinates=i,distance='2',flags='g').split('\n')[-2]).split('=')[1]) nlines=int((grass.read_command('db.select',flags='c',sql='select * from gridCat where cat="%s"' % (cat))).split('|')[-2]) nlines=nlines+1 grass.run_command('v.db.update',map='gridCat',column='npass',value=nlines,where='cat="%s"' % (cat)) elif len(i) == 3: ind += 1 gate1=i[0] gate2=i[1] gate3=i[2] g1E=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate1))).split('|')[2]) g1N=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate1))).split('|')[3]) g2E=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate2))).split('|')[2]) g2N=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate2))).split('|')[3]) p=open('tmpCoord2','w') p.writelines("1 "+str(g1E)+" "+str(g1N)+" "+str(g2E)+" "+str(g2N)) p.close() #l'informazione temporale va messa su path2 e path3 #che vanno in qualche modo salvate TUTTE (inventarsi qualcosa per i nomi) per poter visualizzare tutto insieme #e quando esistono sia un path2 che un path3 va fatto un vettoriale unico che abbia la durata dell'evento in questione grass.run_command('v.net.path',input='gridCat',output='path_'+str(ind),file='tmpCoord2',flags='s') grass.run_command('v.db.droptable',map='path_'+str(ind),flags='f') grass.run_command('v.category',input='path_'+str(ind),output='tmpPath4a',option='del') grass.run_command('v.category',input='tmpPath4a',output='path_'+str(ind),option='add') grass.run_command('v.db.addtable',map='path_'+str(ind)) alls,alle,partcoor,listcoor,listcat=[],[],[],[],[] 
alls=(grass.read_command('v.to.db',map='path_'+str(ind),type='line',option='start',flags='p')).split('\n')[1:-1] alle=(grass.read_command('v.to.db',map='path_'+str(ind),type='line',option='end',flags='p')).split('\n')[1:-1] allse=[] e=0 for i in alls: allse.append(i) allse.append(alle[e]) e=e+1 s=0 for i in range(0,len(allse)): try: if s==0: sx1=round(float(allse[s].split('|')[1]),0) sy1=round(float(allse[s].split('|')[2]),0) s=s+1 else: sx2=round(float(allse[s].split('|')[1]),0) sy2=round(float(allse[s].split('|')[2]),0) if sx1==sx2 and sy1==sy2: allse.pop(s) else: sx1,sy1=sx2,sy2 s=s+1 except IndexError: break s=0 for i in allse: if s==0: sx1=round(float(allse[s].split('|')[1]),4) sy1=round(float(allse[s].split('|')[2]),4) s=s+1 else: sx2=round(float(allse[s].split('|')[1]),4) sy2=round(float(allse[s].split('|')[2]),4) if sx1==sx2 and sy1==sy2: allse.pop(s) s=s+1 else: sx1,sy1=sx2,sy2 s=s+1 for i in range(0,len(allse)-1): listcoor.append(str((float(allse[i].split('|')[1])+float(allse[i+1].split('|')[1]))/2)+","+str((float(allse[i].split('|')[2])+float(allse[i+1].split('|')[2]))/2)) for i in listcoor: cat=int((grass.read_command('v.what',map='gridCat',type='line',coordinates=i,distance='2',flags='g').split('\n')[-2]).split('=')[1]) nlines=int((grass.read_command('db.select',flags='c',sql='select * from gridCat where cat="%s"' % (cat))).split('|')[-2]) nlines=nlines+1 grass.run_command('v.db.update',map='gridCat',column='npass',value=nlines,where='cat="%s"' % (cat)) g3E=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate3))).split('|')[2]) g3N=float((grass.read_command('db.select',flags='c',sql='select * from gates where str_1="%s"' % (gate3))).split('|')[3]) p=open('tmpCoord3','w') p.writelines("1 "+str(g2E)+" "+str(g2N)+" "+str(g3E)+" "+str(g3N)) p.close() grass.run_command('v.net.path',input='gridCat',output='path2_'+str(ind),file='tmpCoord3',flags='s') 
grass.run_command('v.db.droptable',map='path2_'+str(ind),flags='f') grass.run_command('v.category',input='path2_'+str(ind),output='tmpPath6a',option='del') grass.run_command('v.category',input='tmpPath6a',output='path2_'+str(ind),option='add') grass.run_command('v.db.addtable',map='path2_'+str(ind)) alls,alle,partcoor,listcoor,listcat=[],[],[],[],[] alls=(grass.read_command('v.to.db',map='path2_'+str(ind),type='line',option='start',flags='p')).split('\n')[1:-1] alle=(grass.read_command('v.to.db',map='path2_'+str(ind),type='line',option='end',flags='p')).split('\n')[1:-1] allse=[] e=0 for i in alls: allse.append(i) allse.append(alle[e]) e=e+1 s=0 for i in range(0,len(allse)): try: if s==0: sx1=round(float(allse[s].split('|')[1]),0) sy1=round(float(allse[s].split('|')[2]),0) s=s+1 else: sx2=round(float(allse[s].split('|')[1]),0) sy2=round(float(allse[s].split('|')[2]),0) if sx1==sx2 and sy1==sy2: allse.pop(s) else: sx1,sy1=sx2,sy2 s=s+1 except IndexError: break s=0 for i in allse: if s==0: sx1=round(float(allse[s].split('|')[1]),4) sy1=round(float(allse[s].split('|')[2]),4) s=s+1 else: sx2=round(float(allse[s].split('|')[1]),4) sy2=round(float(allse[s].split('|')[2]),4) if sx1==sx2 and sy1==sy2: allse.pop(s) s=s+1 else: sx1,sy1=sx2,sy2 s=s+1 for i in range(0,len(allse)-1): listcoor.append(str((float(allse[i].split('|')[1])+float(allse[i+1].split('|')[1]))/2)+","+str((float(allse[i].split('|')[2])+float(allse[i+1].split('|')[2]))/2)) for i in listcoor: cat=int((grass.read_command('v.what',map='gridCat',type='line',coordinates=i,distance='2',flags='g').split('\n')[-2]).split('=')[1]) nlines=int((grass.read_command('db.select',flags='c',sql='select * from gridCat where cat="%s"' % (cat))).split('|')[-2]) nlines=nlines+1 grass.run_command('v.db.update',map='gridCat',column='npass',value=nlines,where='cat="%s"' % (cat)) #unisco path_ind e path2_ind in un unico path_ind per poi potergli assegnare un'unica informazione temporale 
grass.run_command('v.patch',input='path_'+str(ind)+',path2_'+str(ind),output='path_tmp') grass.run_command('g.rename',vect='path_tmp,path_'+str(ind)) #register the map in the stvds traffic startPo=start[ind-1] endPo=end[ind-1] # grass.run_command('t.register',input='traffic',maps='path_'+str(ind),type='vect',start=startPo,end=endPo) grass.run_command('g.rename',vect='gates,%s' % (outputGates+'_'+str(frame))) grass.run_command('g.rename',vect='gridCat,%s' % (outputGrid+'_'+str(frame))) print "grid and gates prodotti nei rispettivi mapsets?" grass.run_command('g.remove',vect='gates,gridCat') grass.run_command('g.remove',vect='grid1,grid1l,gridl,grid2,grid2l,gridNocat,gridPoints,gridPruned,gridRegion,region1,gridOk,gridSegments,tmpPath2,tmpPath3,tmpPath4a,tmpPath6a') for i in grass.read_command('g.list',type='vect').split('\n')[2:]: for j in i.split(' '): if re.match('path2_',j): grass.run_command('g.remove',vect=j)
def main():
    """Apply a stddev-based color table to the raster given in options['map'].

    Two modes, selected by the -z flag:
      * free-floating data: colors are centered on the map's mean and scaled
        by its standard deviation (via the z() helper defined elsewhere in
        this script);
      * zero-centered data (e.g. a map of deviations): colors are centered on
        0 and band edges come from percentiles of the ABSOLUTE values that
        correspond to 1/2/3 standard deviations.
    The -b flag switches from a smooth gradient to discrete color bands.
    """
    # tmpmap is removed by the script's cleanup handler, so it must be global
    # and defined even if we fail before creating the temporary map
    global tmpmap
    tmpmap = None

    # avoid shadowing the map() builtin
    mapname = options['map']
    zero = flags['z']
    bands = flags['b']

    if not zero:
        # free-floating data: derive center and spread from the map itself
        s = gscript.read_command('r.univar', flags='g', map=mapname)
        kv = gscript.parse_key_val(s)
        global mean, stddev
        mean = float(kv['mean'])
        stddev = float(kv['stddev'])

        if not bands:
            # smooth free floating blue/white/red
            rules = '\n'.join(["0% blue",
                               "%f blue" % z(-2),
                               "%f white" % mean,
                               "%f red" % z(+2),
                               "100% red"])
        else:
            # banded free floating black/red/yellow/green/yellow/red/black
            # reclass with labels only works for category (integer) based maps
            # r.reclass input="$GIS_OPT_MAP" output="${GIS_OPT_MAP}.stdevs" <<
            # EOF
            # >3 S.D. outliers colored black so they show up in d.histogram
            # w/ white background
            rules = '\n'.join(["0% black",
                               "%f black" % z(-3),
                               "%f red" % z(-3),
                               "%f red" % z(-2),
                               "%f yellow" % z(-2),
                               "%f yellow" % z(-1),
                               "%f green" % z(-1),
                               "%f green" % z(+1),
                               "%f yellow" % z(+1),
                               "%f yellow" % z(+2),
                               "%f red" % z(+2),
                               "%f red" % z(+3),
                               "%f black" % z(+3),
                               "100% black"])
    else:
        tmpmap = "r_col_stdev_abs_%d" % os.getpid()
        gscript.mapcalc("$tmp = abs($map)", tmp=tmpmap, map=mapname)

        # data centered on 0 (e.g. map of deviations)
        info = gscript.raster_info(tmpmap)
        maxv = info['max']

        # current r.univar truncates percentage to the base integer
        # FIX: the percentiles must be taken on the absolute-value map
        # (tmpmap), not the signed input: only there do the 68.2689/95.45/
        # 99.73 percentiles correspond to 1/2/3 standard deviations of
        # zero-centered data (the original passed map=mapname, which yields
        # the ~0.47 sigma point for the 68.27th percentile)
        s = gscript.read_command('r.univar', flags='eg', map=tmpmap,
                                 percentile=[95.45, 68.2689, 99.7300])
        kv = gscript.parse_key_val(s)
        stddev1 = float(kv['percentile_68_2689'])
        stddev2 = float(kv['percentile_95_45'])
        stddev3 = float(kv['percentile_99_73'])

        if not bands:
            # zero centered smooth blue/white/red
            rules = '\n'.join(["%f blue" % -maxv,
                               "%f blue" % -stddev2,
                               "0 white",
                               "%f red" % stddev2,
                               "%f red" % maxv])
        else:
            # zero centered banded black/red/yellow/green/yellow/red/black
            # >3 S.D. outliers colored black so they show up in d.histogram
            # w/ white background
            rules = '\n'.join(["%f black" % -maxv,
                               "%f black" % -stddev3,
                               "%f red" % -stddev3,
                               "%f red" % -stddev2,
                               "%f yellow" % -stddev2,
                               "%f yellow" % -stddev1,
                               "%f green" % -stddev1,
                               "%f green" % stddev1,
                               "%f yellow" % stddev1,
                               "%f yellow" % stddev2,
                               "%f red" % stddev2,
                               "%f red" % stddev3,
                               "%f black" % stddev3,
                               "%f black" % maxv,
                               ])

    gscript.write_command('r.colors', map=mapname, rules='-', stdin=rules)
def main():
    """Create a new attribute table and link it to a vector map layer.

    Reads map/table/layer/columns/key from the module options, creates the
    table (with the key column prepended if missing), connects it via
    v.db.connect and populates the key column with category values via
    v.to.db. Returns 0 on success; aborts with grass.fatal() on error.
    """
    vector = options['map']
    table = options['table']
    layer = options['layer']
    columns = options['columns']
    key = options['key']

    # does map exist in CURRENT mapset? (only maps there may be modified)
    mapset = grass.gisenv()['MAPSET']
    if not grass.find_file(vector, element = 'vector', mapset = mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    map_name = vector.split('@')[0]

    if not table:
        if layer == '1':
            grass.verbose(_("Using vector map name as table name: <%s>") % map_name)
            table = map_name
        else:
            # to avoid tables with identical names on higher layers
            table = "%s_%s" % (map_name, layer)
            grass.verbose(_("Using vector map name extended by layer number as table name: <%s>") % table)
    else:
        grass.verbose(_("Using user specified table name: %s") % table)

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags = 'c')
    grass.verbose(_("Creating new DB connection based on default mapset settings..."))
    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']

    # FIX: use open() instead of the Python-2-only file() builtin, and make
    # sure the null-device handle is closed again (it previously leaked)
    nuldev = open(os.devnull, 'w')
    try:
        # maybe there is already a table linked to the selected layer?
        try:
            grass.vector_db(map_name, stderr = nuldev)[int(layer)]
            grass.fatal(_("There is already a table linked to layer <%s>") % layer)
        except KeyError:
            pass

        # maybe there is already a table with that name?
        tables = grass.read_command('db.tables', flags = 'p', database = database,
                                    driver = driver, stderr = nuldev)
    finally:
        nuldev.close()

    if table not in tables.splitlines():
        if columns:
            # list comprehension instead of map(): also yields a real list on
            # Python 3, which the insert() below requires
            column_def = [col.strip().lower() for col in columns.strip().split(',')]
        else:
            column_def = []

        # if not existing, create it:
        column_def_key = "%s integer" % key
        if column_def_key not in column_def:
            column_def.insert(0, column_def_key)
        column_def = ','.join(column_def)

        grass.verbose(_("Creating table with columns (%s)...") % column_def)

        # NOTE(review): this relies on the old grass.script API where
        # write_command() returned an exit code; newer versions raise
        # CalledModuleError instead — confirm which API this script targets
        sql = "CREATE TABLE %s (%s)" % (table, column_def)
        if grass.write_command('db.execute', input = '-', database = database,
                               driver = driver, stdin = sql) != 0:
            grass.fatal(_("Unable to create table <%s>") % table)

    # connect the map to the DB:
    grass.run_command('v.db.connect', quiet = True,
                      map = map_name, database = database, driver = driver,
                      layer = layer, table = table, key = key)

    # finally we have to add cats into the attribute DB to make modules such
    # as v.what.rast happy: (creates new row for each vector line)
    grass.run_command('v.to.db', map = map_name, layer = layer,
                      option = 'cat', column = key, qlayer = layer)

    grass.verbose(_("Current attribute table links:"))
    if grass.verbosity() > 2:
        grass.run_command('v.db.connect', flags = 'p', map = map_name)

    # write cmd history:
    grass.vector_history(map_name)

    return 0