def getLayerInfo(layer, ltype):
    """Return the metadata text for a map as printed by r.info/v.info.

    :param layer: map name
    :param ltype: map type, either "raster" or "vector"
    :return: the module output string, or None for an unrecognized *ltype*
             (the original left ``info`` unbound and raised NameError here)
    """
    info = None
    if ltype == "raster":
        info = grass.read_command('r.info', map=layer)
    if ltype == "vector":
        info = grass.read_command('v.info', map=layer)
    return info
def polylines(points_map, output, env):
    """Cluster points and connect points by line in each cluster.

    Points are clustered with v.cluster (OPTICS, min. 3 points per
    cluster); within each cluster the shortest n-1 point-to-point
    segments are written as lines and rasterized into *output*.
    """
    tmp_cluster = 'tmp_cluster'
    # cluster categories are written to layer 3 ('-t' flag)
    gcore.run_command('v.cluster', flags='t', input=points_map, min=3,
                      layer='3', output=tmp_cluster, method='optics', env=env)
    cats = gcore.read_command('v.category', input=tmp_cluster, layer=3,
                              option='print', env=env).strip()
    cats = list(set(cats.split()))
    line = ''
    for cat in cats:
        point_list = []
        distances = {}
        points = gcore.read_command('v.out.ascii', input=tmp_cluster, layer=3,
                                    type='point', cats=cat, format='point',
                                    env=env).strip().split()
        for point in points:
            # each whitespace-separated token is "x|y|cat"; keep coordinates
            point = point.split('|')[:2]
            point_list.append((float(point[0]), float(point[1])))
        # Euclidean distance for every unordered pair in the cluster
        for i, point1 in enumerate(point_list[:-1]):
            for point2 in point_list[i + 1:]:
                distances[(point1, point2)] = sqrt(
                    (point1[0] - point2[0]) * (point1[0] - point2[0]) +
                    (point1[1] - point2[1]) * (point1[1] - point2[1]))
        # keep the n-1 shortest segments
        # NOTE(review): this does not guarantee a connected path/MST
        ordered = sorted(distances.items(), key=lambda x: x[1])[:len(points) - 1]
        for key, value in ordered:
            # GRASS standard-format ASCII: line with 2 vertices, 1 category
            line += 'L 2 1\n'
            line += '{x} {y}\n'.format(x=key[0][0], y=key[0][1])
            line += '{x} {y}\n'.format(x=key[1][0], y=key[1][1])
            line += '1 {cat}\n\n'.format(cat=cat)
    gcore.write_command('v.in.ascii', input='-', stdin=line, output=output,
                        format='standard', flags='n', env=env)
    gcore.run_command('v.to.rast', input=output, output=output, type='line',
                      use='cat', env=env)
def _computeRegions(self, count, startRegion, endRegion=None, zoomValue=None):
    """Computes regions based on start region and end region or zoom value
    for each of the animation frames.

    Fills self._regions with one region dict per frame.
    NOTE(review): *count* is unused here; self._mapCount drives the loop.
    """
    region = dict(gcore.region())  # cast to dict, otherwise deepcopy error
    if startRegion:
        region = dict(
            parse_key_val(
                gcore.read_command("g.region", flags="gu", region=startRegion),
                val_type=float,
            ))
    # drop keys that are not settable region values
    del region["cells"]
    del region["cols"]
    del region["rows"]
    if "projection" in region:
        del region["projection"]
    if "zone" in region:
        del region["zone"]
    regions = []
    for i in range(self._mapCount):
        regions.append(copy.copy(region))
    self._regions = regions
    if not (endRegion or zoomValue):
        # static region for every frame
        return
    startRegionDict = parse_key_val(
        gcore.read_command("g.region", flags="gu", region=startRegion),
        val_type=float,
    )
    if endRegion:
        endRegionDict = parse_key_val(
            gcore.read_command("g.region", flags="gu", region=endRegion),
            val_type=float,
        )
        # linearly interpolate bounds and resolution between the regions
        for key in ("n", "s", "e", "w", "nsres", "ewres"):
            values = interpolate(startRegionDict[key], endRegionDict[key],
                                 self._mapCount)
            for value, region in zip(values, regions):
                region[key] = value
    elif zoomValue:
        # shrink the region by the zoom step for each successive frame
        for i in range(self._mapCount):
            regions[i]["n"] -= zoomValue[0] * i
            regions[i]["e"] -= zoomValue[1] * i
            regions[i]["s"] += zoomValue[0] * i
            regions[i]["w"] += zoomValue[1] * i
            # handle cases when north < south and similarly EW
            if (regions[i]["n"] < regions[i]["s"]
                    or regions[i]["e"] < regions[i]["w"]):
                regions[i] = regions[i - 1]
    self._regions = regions
def main():
    """Query raster maps (or an imagery group) at given coordinates and
    plot the values, either via gnuplot or the internal line graph.
    """
    group = options['group']
    raster = options['raster']
    output = options['output']
    coords = options['coordinates']
    img_fmt = options['format']
    coord_legend = flags['c']
    gnuplot = flags['g']

    global tmp_dir
    tmp_dir = grass.tempdir()

    if not group and not raster:
        grass.fatal(_("Either group= or raster= is required"))
    if group and raster:
        grass.fatal(_("group= and raster= are mutually exclusive"))

    # check if gnuplot is present
    if gnuplot and not grass.find_program('gnuplot', '-V'):
        grass.fatal(_("gnuplot required, please install first"))

    # get data from group listing and set the x-axis labels
    if group:
        # Parse the group list output
        s = grass.read_command('i.group', flags='g', group=group, quiet=True)
        rastermaps = s.splitlines()
    else:
        # get data from list of files and set the x-axis labels
        rastermaps = raster.split(',')
    xlabels = ["'%s' %d" % (n, i + 1) for i, n in enumerate(rastermaps)]
    xlabels = ','.join(xlabels)

    # get y-data for gnuplot-data file
    what = []
    s = grass.read_command('r.what', map=rastermaps, coordinates=coords,
                           null='0', quiet=True)
    if len(s) == 0:
        grass.fatal(_('No data returned from query'))
    for l in s.splitlines():
        f = l.split('|')
        for i, v in enumerate(f):
            # empty fields and NULL markers ('*') become 0,
            # everything else (including coordinates) becomes float
            if v in ['', '*']:
                f[i] = 0
            else:
                f[i] = float(v)
        what.append(f)

    # build data files
    if gnuplot:
        draw_gnuplot(what, xlabels, output, img_fmt, coord_legend)
    else:
        draw_linegraph(what)
def _computeRegions(self, width, height, count, startRegion,
                    endRegion=None, zoomValue=None):
    """Computes regions based on start region and end region or zoom value
    for each of the animation frames.

    Each region's resolution is recomputed at the end so the region maps
    onto a *width* x *height* display.
    NOTE(review): *count* is unused; self._mapCount drives the frame count.
    NOTE(review): with startRegion set but neither endRegion nor zoomValue,
    regions stay None and the resolution loop below would fail -- presumably
    callers never pass that combination; verify.
    """
    currRegion = dict(
        gcore.region())  # cast to dict, otherwise deepcopy error
    del currRegion['cells']
    del currRegion['cols']
    del currRegion['rows']
    regions = []
    for i in range(self._mapCount):
        if endRegion or zoomValue:
            regions.append(copy.copy(currRegion))
        else:
            # no region animation: placeholder per frame
            regions.append(None)
    if not startRegion:
        self._regions = regions
        return
    startRegionDict = parse_key_val(gcore.read_command('g.region',
                                                       flags='gu',
                                                       region=startRegion),
                                    val_type=float)
    if endRegion:
        endRegionDict = parse_key_val(gcore.read_command('g.region',
                                                         flags='gu',
                                                         region=endRegion),
                                      val_type=float)
        # linearly interpolate the region bounds between frames
        for key in ('n', 's', 'e', 'w'):
            values = interpolate(startRegionDict[key], endRegionDict[key],
                                 self._mapCount)
            for value, region in zip(values, regions):
                region[key] = value
    elif zoomValue:
        # shrink the region by the zoom step for each successive frame
        for i in range(self._mapCount):
            regions[i]['n'] -= zoomValue[0] * i
            regions[i]['e'] -= zoomValue[1] * i
            regions[i]['s'] += zoomValue[0] * i
            regions[i]['w'] += zoomValue[1] * i
            # handle cases when north < south and similarly EW
            if regions[i]['n'] < regions[i]['s'] or \
                    regions[i]['e'] < regions[i]['w']:
                regions[i] = regions[i - 1]
    for region in regions:
        mapwidth = abs(region['e'] - region['w'])
        mapheight = abs(region['n'] - region['s'])
        # resolution so the region fits the display exactly
        region['nsres'] = mapheight / height
        region['ewres'] = mapwidth / width
    self._regions = regions
def _computeRegions(self, count, startRegion, endRegion=None, zoomValue=None):
    """Computes regions based on start region and end region or zoom value
    for each of the animation frames.

    Fills self._regions with one region dict per animation frame.
    NOTE(review): *count* is unused; self._mapCount drives the frame count.
    """
    region = dict(gcore.region())  # cast to dict, otherwise deepcopy error
    if startRegion:
        region = dict(
            parse_key_val(gcore.read_command('g.region', flags='gu',
                                             region=startRegion),
                          val_type=float))
    # drop keys that are not settable region values
    del region['cells']
    del region['cols']
    del region['rows']
    if 'projection' in region:
        del region['projection']
    if 'zone' in region:
        del region['zone']
    regions = []
    for i in range(self._mapCount):
        regions.append(copy.copy(region))
    self._regions = regions
    if not (endRegion or zoomValue):
        # static region for every frame
        return
    startRegionDict = parse_key_val(gcore.read_command('g.region',
                                                       flags='gu',
                                                       region=startRegion),
                                    val_type=float)
    if endRegion:
        endRegionDict = parse_key_val(gcore.read_command('g.region',
                                                         flags='gu',
                                                         region=endRegion),
                                      val_type=float)
        # interpolate bounds and resolution between start and end regions
        for key in ('n', 's', 'e', 'w', 'nsres', 'ewres'):
            values = interpolate(startRegionDict[key], endRegionDict[key],
                                 self._mapCount)
            for value, region in zip(values, regions):
                region[key] = value
    elif zoomValue:
        # shrink the region by the zoom step for each successive frame
        for i in range(self._mapCount):
            regions[i]['n'] -= zoomValue[0] * i
            regions[i]['e'] -= zoomValue[1] * i
            regions[i]['s'] += zoomValue[0] * i
            regions[i]['w'] += zoomValue[1] * i
            # handle cases when north < south and similarly EW
            if regions[i]['n'] < regions[i]['s'] or \
                    regions[i]['e'] < regions[i]['w']:
                regions[i] = regions[i - 1]
    self._regions = regions
def polygons(points_map, output, env):
    """Clusters markers together and creates polygons.
    Requires GRASS 7.1.

    Clusters with more than two points become convex hulls; clusters with
    exactly two points become a line segment. The patched result is
    rasterized into *output*.
    """
    tmp_cluster = 'tmp_cluster'
    tmp_hull = 'tmp_hull'
    gcore.run_command('v.cluster', flags='t', input=points_map, min=3,
                      layer='3', output=tmp_cluster, method='optics', env=env)
    cats = gcore.read_command('v.category', input=tmp_cluster, layer='3',
                              option='print', env=env).strip().split()
    cats_list = list(set(cats))
    # number of points in each cluster category
    cats_dict = dict([(x, cats.count(x)) for x in cats_list])
    for cat in cats_list:
        if cats_dict[cat] > 2:
            gcore.run_command('v.hull', input=tmp_cluster,
                              output=tmp_hull + "_%s" % cat, cats=cat,
                              layer='3', env=env)
        elif cats_dict[cat] == 2:
            # a hull needs >= 3 points; connect a 2-point cluster by a line
            points = gcore.read_command('v.out.ascii', input=tmp_cluster,
                                        format='point', separator='space',
                                        layer='3', cats=cat,
                                        env=env).strip().splitlines()
            ascii = 'L 2 1\n' + points[0] + '\n' + points[1] + '\n' + '1 1'
            gcore.write_command('v.in.ascii', format='standard', input='-',
                                flags='n', output=tmp_hull + '_%s' % cat,
                                stdin=ascii, env=env)
    gcore.run_command('v.patch',
                      input=[tmp_hull + '_%s' % cat for cat in cats_list],
                      output=output, env=env)
    gcore.run_command('v.to.rast', input=output, output=output,
                      type='area,line', use='val', value=1, env=env)
def test_fromtable(self):
    """Copy a whole table between databases and verify contents match."""
    self.runModule('db.copy', overwrite=True,
                   from_database=self.orig_mapset,
                   from_table=self.invect,
                   to_table=self.outable)
    expected = read_command('db.select', table=self.invect,
                            database=self.orig_mapset)
    copied = read_command('db.select', table=self.outable)
    self.assertEqual(first=expected, second=copied)
def main():
    """Query raster maps (or an imagery group) at a coordinate and plot
    the values with gnuplot or the internal line graph.
    """
    group = options['group']
    raster = options['raster']
    output = options['output']
    coords = options['east_north']
    label = flags['c']
    gnuplot = flags['g']

    if not group and not raster:
        grass.fatal(_("Either group= or raster= is required"))
    if group and raster:
        grass.fatal(_("group= and raster= are mutually exclusive"))

    # check if present
    if gnuplot and not grass.find_program('gnuplot', ['-V']):
        grass.fatal(_("gnuplot required, please install first"))

    # NOTE(review): tmp1/tmp2 are not used in this function -- presumably
    # leftovers or consumed elsewhere via globals; verify before removing.
    tmp1 = grass.tempfile()
    tmp2 = grass.tempfile()

    # get y-data for gnuplot-data file
    # get data from group files and set the x-axis labels
    if group:
        # ## PARSES THE GROUP FILES - gets rid of ugly header info from group list output
        s = grass.read_command('i.group', flags='g', group=group, quiet=True)
        rastermaps = s.splitlines()
    else:
        # ## get data from list of files and set the x-axis labels
        rastermaps = raster.split(',')
    xlabels = ["'%s' %d" % (n, i + 1) for i, n in enumerate(rastermaps)]
    xlabels = ','.join(xlabels)
    numbands = len(rastermaps)

    what = []
    s = grass.read_command('r.what', input=rastermaps, east_north=coords,
                           quiet=True)
    for l in s.splitlines():
        f = l.split('|')
        for i, v in enumerate(f):
            # empty fields and NULL markers ('*') become 0, else float
            if v in ['', '*']:
                f[i] = 0
            else:
                f[i] = float(v)
        what.append(f)

    # build data files
    if gnuplot:
        draw_gnuplot(what, xlabels, output, label)
    else:
        draw_linegraph(what)
def test_where(self):
    """Copy only rows matching a WHERE clause and compare via db.select."""
    self.runModule('db.copy', overwrite=True,
                   from_database=self.orig_mapset,
                   from_table=self.invect,
                   to_table=self.outable,
                   where="NAME='RALEIGH'")
    query = "SELECT * from {inp} WHERE NAME='RALEIGH'".format(inp=self.invect)
    expected = read_command('db.select', database=self.orig_mapset, sql=query)
    copied = read_command('db.select', table=self.outable)
    self.assertEqual(first=expected, second=copied)
def patch_analysis_per_subregion(development_diff, subregions, threshold,
                                 tmp_clump, tmp_clump_cat):
    """Compute patch geometry statistics separately for each subregion.

    Returns a dict mapping subregion category -> numpy array of
    r.object.geometry rows (columns 1 and 2) whose first column exceeds
    *threshold*.
    NOTE(review): the meaning of columns 1 and 2 depends on
    r.object.geometry's output order -- confirm against the module manual.
    """
    gcore.run_command('r.clump', input=development_diff, output=tmp_clump,
                      overwrite=True, quiet=True)
    cats = gcore.read_command("r.describe", flags="1", map=subregions,
                              quiet=True).strip().splitlines()
    subregions_data = {}
    env = os.environ.copy()
    for cat in cats:
        # mask the clump raster to the current subregion
        grast.mapcalc('{new} = if ({reg} == {cat}, {clump}, null())'.format(
            new=tmp_clump_cat, reg=subregions, cat=cat, clump=tmp_clump),
            overwrite=True)
        # shrink the region to the non-null extent for speed
        env['GRASS_REGION'] = gcore.region_env(zoom=tmp_clump_cat)
        data = gcore.read_command('r.object.geometry', input=tmp_clump_cat,
                                  flags='m', separator='comma', env=env,
                                  quiet=True).strip()
        data = np.loadtxt(StringIO(data), delimiter=',', usecols=(1, 2),
                          skiprows=1)
        subregions_data[cat] = data[data[:, 0] > threshold]
    return subregions_data
def _computeRegions(self, width, height, count, startRegion,
                    endRegion=None, zoomValue=None):
    """Computes regions based on start region and end region or zoom value
    for each of the animation frames.

    The resolution of every region is recomputed at the end to match a
    *width* x *height* display.
    NOTE(review): *count* is unused; self._mapCount drives the frame count.
    NOTE(review): if startRegion is set but neither endRegion nor zoomValue,
    regions remain None and the final loop would fail -- presumably callers
    never pass that combination; verify.
    """
    currRegion = dict(gcore.region())  # cast to dict, otherwise deepcopy error
    del currRegion['cells']
    del currRegion['cols']
    del currRegion['rows']
    regions = []
    for i in range(self._mapCount):
        if endRegion or zoomValue:
            regions.append(copy.copy(currRegion))
        else:
            # no region animation: placeholder per frame
            regions.append(None)
    if not startRegion:
        self._regions = regions
        return
    startRegionDict = parse_key_val(gcore.read_command('g.region',
                                                       flags='gu',
                                                       region=startRegion),
                                    val_type=float)
    if endRegion:
        endRegionDict = parse_key_val(gcore.read_command('g.region',
                                                         flags='gu',
                                                         region=endRegion),
                                      val_type=float)
        # linearly interpolate bounds between frames
        for key in ('n', 's', 'e', 'w'):
            values = interpolate(startRegionDict[key], endRegionDict[key],
                                 self._mapCount)
            for value, region in zip(values, regions):
                region[key] = value
    elif zoomValue:
        # shrink the region by the zoom step for each successive frame
        for i in range(self._mapCount):
            regions[i]['n'] -= zoomValue[0] * i
            regions[i]['e'] -= zoomValue[1] * i
            regions[i]['s'] += zoomValue[0] * i
            regions[i]['w'] += zoomValue[1] * i
            # handle cases when north < south and similarly EW
            if regions[i]['n'] < regions[i]['s'] or \
                    regions[i]['e'] < regions[i]['w']:
                regions[i] = regions[i - 1]
    for region in regions:
        mapwidth = abs(region['e'] - region['w'])
        mapheight = abs(region['n'] - region['s'])
        # resolution so the region fits the display exactly
        region['nsres'] = mapheight / height
        region['ewres'] = mapwidth / width
    self._regions = regions
def viewshed(scanned_elev, output, vector, visible_color, invisible_color,
             obs_elev=1.7, env=None):
    """Compute a binary viewshed from the first point of *vector*.

    Visible cells are colored *visible_color*, invisible ones
    *invisible_color*. Nothing is computed when the vector has no points.
    """
    coordinates = gcore.read_command('v.out.ascii', input=vector,
                                     separator=',', env=env).strip()
    coordinate = None
    for line in coordinates.split(os.linesep):
        try:
            coordinate = [float(c) for c in line.split(',')[0:2]]
        except ValueError:
            # no points in map
            pass
        # only the first observer point is used
        break
    if coordinate:
        gcore.run_command('r.viewshed', flags='b', input=scanned_elev,
                          output=output, coordinates=coordinate,
                          observer_elevation=obs_elev, env=env)
        # make out-of-sight cells 0 so the color rule covers them
        gcore.run_command('r.null', map=output, null=0, env=env)
        gcore.write_command('r.colors', map=output, rules='-',
                            stdin='0 {invis}\n1 {vis}'.format(
                                vis=visible_color, invis=invisible_color),
                            env=env)
def patch_analysis(development_diff, threshold, tmp_clump):
    """Return per-patch geometry rows whose first column exceeds *threshold*.

    Patches are contiguous cells of *development_diff* (via r.clump);
    rows are columns 1 and 2 of r.object.geometry output. Returns an
    empty (0, 2) array when the module fails (no patches).
    """
    gcore.run_command("r.clump", input=development_diff, output=tmp_clump,
                      overwrite=True, quiet=True)
    try:
        data = gcore.read_command(
            "r.object.geometry",
            input=tmp_clump,
            flags="m",
            separator="comma",
            quiet=True,
        ).strip()
        data = np.loadtxt(StringIO(data), delimiter=",", usecols=(1, 2),
                          skiprows=1)
        # in case there is just one record
        data = data.reshape((-1, 2))
        data = data[data[:, 0] > threshold]
    except CalledModuleError:
        gcore.warning("No changes in development, no patches found.")
        data = np.empty([0, 2])
    return data
def test_ele(self):
    """Run r.geomorphon on the elevation map and check the categories."""
    self.runModule('r.geomorphon', search=10,
                   elevation=self.inele, forms=self.outele)
    actual = read_command('r.category', map=self.outele)
    self.assertEqual(first=ele_out, second=actual)
def test_text_delimeter(self):
    """Test loading CSV with text delimiter

    Text delimiter added in r63581
    """
    column_spec = ("cat int, x double, y double,"
                   " ed_cat varchar(20), field_estimate varchar(20)")
    self.assertModule(
        "v.in.ascii",
        input="-",
        stdin_=INPUT_DOUBLEQUOTES,
        output=self.xyvector,
        separator="comma",
        text="doublequote",
        skip=1,
        cat=1,
        x=2,
        y=3,
        columns=column_spec,
    )
    table = read_command("v.db.select", map=self.xyvector, separator="pipe")
    self.assertEqual(
        first=TABLE_1.replace("\n", os.linesep),
        second=table,
        msg="Attribute table has wrong entries",
    )
def test_uncommon_delims(self):
    """Test loading CSV with uncommon delimiters"""
    column_spec = ("cat int, x double, y double,"
                   " ed_cat varchar(20), field_estimate varchar(20)")
    self.assertModule(
        "v.in.ascii",
        input="-",
        stdin_=INPUT_UNCOMMON,
        output=self.xyvector,
        separator="@",
        text="^",
        skip=1,
        cat=1,
        x=2,
        y=3,
        columns=column_spec,
    )
    table = read_command("v.db.select", map=self.xyvector, separator="pipe")
    self.assertEqual(
        first=TABLE_1.replace("\n", os.linesep),
        second=table,
        msg="Attribute table has wrong entries",
    )
def test_tsv(self):
    """Test loading TSV (CSV with tab as delim)

    Using double quote character for quote.
    """
    column_spec = ("cat int, x double, y double,"
                   " ed_cat varchar(20), field_estimate varchar(20)")
    self.assertModule(
        "v.in.ascii",
        input="-",
        stdin_=INPUT_TSV,
        output=self.xyvector,
        separator="tab",
        text='"',
        skip=1,
        cat=1,
        x=2,
        y=3,
        columns=column_spec,
    )
    table = read_command("v.db.select", map=self.xyvector, separator="pipe")
    self.assertEqual(
        first=TABLE_1.replace("\n", os.linesep),
        second=table,
        msg="Attribute table has wrong entries",
    )
def project(file, source, dest):
    """Projects point (x, y) using projector

    Reprojects the coordinate pairs in *file* from *source* to *dest*
    CRS with m.proj and rescales them by dest['scale'].

    :return: tuple (list of projected (x, y) tuples, number of failures)
    """
    errors = 0
    points = []
    try:
        ret = gcore.read_command(
            "m.proj",
            quiet=True,
            flags="d",
            proj_in=source["proj"],
            proj_out=dest["proj"],
            sep=";",
            input=file,
        )
        ret = decode(ret)
    except CalledModuleError:
        gcore.fatal(cs2cs + " failed")
    if not ret:
        gcore.fatal(cs2cs + " failed")
    for line in ret.splitlines():
        # m.proj marks points that could not be projected with '*'
        if "*" in line:
            errors += 1
        else:
            p_x2, p_y2, p_z2 = list(map(float, line.split(";")))
            points.append((p_x2 / dest["scale"], p_y2 / dest["scale"]))
    return points, errors
def project(file, source, dest):
    """Projects point (x, y) using projector

    Reprojects the coordinate pairs in *file* from *source* to *dest*
    CRS with m.proj and rescales them by dest['scale'].
    NOTE(review): unlike the sibling variant, the m.proj output is not
    decoded here -- assumes read_command returns str; verify on Python 3.

    :return: tuple (list of projected (x, y) tuples, number of failures)
    """
    errors = 0
    points = []
    try:
        ret = gcore.read_command('m.proj', quiet=True, flags='d',
                                 proj_in=source['proj'],
                                 proj_out=dest['proj'], sep=';', input=file)
    except CalledModuleError:
        gcore.fatal(cs2cs + ' failed')
    if not ret:
        gcore.fatal(cs2cs + ' failed')
    for line in ret.splitlines():
        # m.proj marks points that could not be projected with '*'
        if "*" in line:
            errors += 1
        else:
            p_x2, p_y2, p_z2 = list(map(float, line.split(';')))
            points.append((p_x2 / dest['scale'], p_y2 / dest['scale']))
    return points, errors
def subsurface_slice(points, voxel, slice_, axes, slice_line, units, offset, env):
    """Create a 2D slice of *voxel* along the line given by two points.

    If *points* does not contain exactly two points, an empty (null)
    slice raster is produced instead.
    """
    topo = gvect.vector_info_topo(points)
    if topo:
        if topo['points'] != 2:
            # wrong number of points: emit an all-null slice and bail out
            grast.mapcalc(exp=slice_ + " = null()", overwrite=True)
            return
    coordinates = gcore.read_command('v.out.ascii', input=points,
                                     format='point', separator=',',
                                     env=env).strip()
    coords_list = []
    i = 0
    for coords in coordinates.split(os.linesep):
        coords_list.extend(coords.split(',')[:2])
        i += 1
        if i >= 2:
            # only the first two points define the slice line
            break
    if axes:
        # drop a stale axes table before r3.slice recreates it
        gcore.run_command('db.droptable', flags='f', table=axes, env=env)
    gcore.run_command('r3.slice', overwrite=True, input=voxel, output=slice_,
                      coordinates=','.join(coords_list), axes=axes,
                      slice_line=slice_line, units=units, offset=offset,
                      env=env)
def test_sint(self):
    """Run r.geomorphon on the synthetic map and check the categories."""
    self.runModule('r.geomorphon', search=10,
                   elevation=self.insint, forms=self.outsint)
    actual = read_command('r.category', map=self.outsint)
    self.assertEqual(first=synth_out, second=actual)
def main():
    """Generate random points whose density follows a probability raster.

    The probability map is rescaled to 0..1, the region resolution is
    chosen so thresholding a uniform random surface yields roughly
    *count* points, and the resulting grid points are perturbed so they
    do not look grid-aligned.
    """
    options, flags = gcore.parser()
    probability = options['probability']
    output = options['output']
    count = int(options['count'])

    gcore.use_temp_region()

    # probability map rescaled to the 0..1 range
    probab_01 = 'probability_01_' + str(os.getpid())
    TMP_RAST.append(probab_01)
    info = grast.raster_info(probability)
    gcore.write_command('r.recode', flags='d', input=probability,
                        output=probab_01,
                        title="Recoded probability map to 0 to 1",
                        rules='-',
                        stdin='{minim}:{maxim}:0:1'.format(minim=info['min'],
                                                           maxim=info['max']))
    mean = gcore.parse_key_val(gcore.read_command('r.univar', map=probab_01,
                                                  flags='g'),
                               val_type=float)['mean']
    # cell size so that mean probability * number of cells ~= count
    # (a superseded perimeter-based estimate that was dead code is removed)
    resolution = sqrt((mean * (info['north'] - info['south']) *
                       (info['east'] - info['west'])) / count)
    gcore.run_command('g.region', res=resolution)

    random_name = 'random_' + str(os.getpid())
    point_map = 'points_' + str(os.getpid())
    point_grid = 'points_' + str(os.getpid())
    TMP_RAST.append(random_name)
    TMP_RAST.append(point_map)
    TMP_VECT.append(point_grid)

    # keep cells where a uniform random value falls below the probability
    gcore.run_command('r.surf.random', output=random_name, min=0, max=1)
    grast.mapcalc(exp='{point_map} = if({rand} <= {prob}, 1, null())'.format(
        rand=random_name, prob=probab_01, point_map=point_map))
    gcore.run_command('r.to.vect', flags='t', input=point_map,
                      output=point_grid, type='point')
    # jitter the grid-aligned points within half a cell
    gcore.run_command('v.perturb', input=point_grid, output=output,
                      parameter=resolution / 2., seed=os.getpid())
def drain(elevation, point, drain, conditioned, env):
    """Trace the drainage path downhill from *point* over *elevation*.

    If *conditioned* is set, the DEM is hydrologically conditioned with
    r.hydrodem first and the trace runs on the conditioned map. With no
    point in the map an empty output vector is created.
    NOTE(review): assumes *point* contains at most one point -- with more
    the `x, y, cat` unpacking below would raise ValueError; verify callers.
    """
    data = gcore.read_command('v.out.ascii', input=point, format='point',
                              env=env).strip()
    if data:
        x, y, cat = data.split('|')
        if conditioned:
            # fill sinks so the traced path does not stop in depressions
            gcore.run_command('r.hydrodem', input=elevation,
                              output=conditioned, mod=50, size=50, flags='a',
                              env=env)
            gcore.run_command('r.drain', input=conditioned, output=drain,
                              drain=drain,
                              start_coordinates='{},{}'.format(x, y), env=env)
        else:
            gcore.run_command('r.drain', input=elevation, output=drain,
                              drain=drain,
                              start_coordinates='{},{}'.format(x, y), env=env)
    else:
        # no input point: create an empty map so downstream code has output
        gcore.run_command('v.edit', map=drain, tool='create', env=env)
def _createNewMap(self, mapName, backgroundMap, mapType):
    """Create a new raster *mapName* of *mapType*, optionally initialized
    from *backgroundMap*; back it up and mark it as the edited raster.

    Emits ``uploadMapCategories`` for CELL background maps (so the editor
    can offer existing category values) and ``newRasterCreated`` on success.

    :raises ScriptError: when the underlying mapcalc/r.describe call fails
    """
    name = mapName.split('@')[0]
    background = backgroundMap.split('@')[0]
    # map GRASS cell types to the matching mapcalc cast function
    types = {'CELL': 'int', 'FCELL': 'float', 'DCELL': 'double'}
    if background:
        back = background
    else:
        back = 'null()'
    try:
        grast.mapcalc(exp="{name} = {mtype}({back})".format(
            name=name, mtype=types[mapType], back=back),
            overwrite=True, quiet=True)
        if background:
            self._backgroundRaster = backgroundMap
            if mapType == 'CELL':
                # list the existing integer categories for the editor UI
                values = gcore.read_command('r.describe', flags='1n',
                                            map=backgroundMap,
                                            quiet=True).strip()
                if values:
                    self.uploadMapCategories.emit(values=values.split('\n'))
    except CalledModuleError:
        raise ScriptError
    self._backupRaster(name)
    name = name + '@' + gcore.gisenv()['MAPSET']
    self._editedRaster = name
    self.newRasterCreated.emit(name=name)
def _getOGRFormats():
    """!Get dictionary of available OGR drivers"""
    driver_listing = grass.read_command('v.in.ogr', flags='f', quiet=True)
    readable = _parseFormats(driver_listing)
    writable = _parseFormats(driver_listing, writableOnly=True)
    return readable, writable
def analyse_subregion(params):
    """Worker: compute patch geometry for one subregion category.

    *params* is a tuple (tmp_clump_cat, subregions, cat, clump, threshold),
    packed so the function can be used with multiprocessing Pool.map.
    Returns r.object.geometry rows (columns 1 and 2) whose first column
    exceeds *threshold*, or an empty (0, 2) array when there are no patches.
    """
    tmp_clump_cat, subregions, cat, clump, threshold = params
    # mask the clump raster to the current subregion
    grast.mapcalc(
        "{new} = if ({reg} == {cat}, {clump}, null())".format(
            new=tmp_clump_cat, reg=subregions, cat=cat, clump=clump),
        overwrite=True,
    )
    env = os.environ.copy()
    # shrink the region to the masked extent to speed up r.object.geometry
    env["GRASS_REGION"] = gcore.region_env(zoom=tmp_clump_cat)
    try:
        data = gcore.read_command(
            "r.object.geometry",
            input=tmp_clump_cat,
            flags="m",
            separator="comma",
            env=env,
            quiet=True,
        ).strip()
        data = np.loadtxt(StringIO(data), delimiter=",", usecols=(1, 2),
                          skiprows=1)
        # in case there is just one record
        data = data.reshape((-1, 2))
        return data[data[:, 0] > threshold]
    except CalledModuleError:
        gcore.warning(
            "Subregion {cat} has no changes in development, no patches found.".
            format(cat=cat))
        return np.empty([0, 2])
def SetGeorefAndProj(self):
    """Set georeference and projection to target file"""
    wkt = grass.read_command('g.proj', flags='wf')
    self.tDataset.SetProjection(wkt)
    self.tDataset.SetGeoTransform(self.tGeotransform)
def _getGDALFormats():
    """!Get dictionary of available GDAL drivers"""
    driver_listing = grass.read_command('r.in.gdal', flags='f', quiet=True)
    readable = _parseFormats(driver_listing)
    writable = _parseFormats(driver_listing, writableOnly=True)
    return readable, writable
def test_nodes_layers(self):
    """Test"""
    self.assertModule('v.net', operation='nodes', flags='c',
                      input='streets', output=self.network)
    expected_topology = dict(points=41813, nodes=41813, lines=49746)
    self.assertVectorFitsTopoInfo(vector=self.network,
                                  reference=expected_topology)
    layer_listing = read_command('v.category', input=self.network,
                                 option='layers').strip()
    self.assertEqual(first="1\n2", second=layer_listing,
                     msg="Layers do not match")
def patch_analysis_per_subregion_parallel(development_diff, subregions,
                                          threshold, tmp_clump, tmp_name,
                                          nprocs):
    """Compute patch statistics per subregion using a process pool.

    Like patch_analysis_per_subregion, but each subregion category is
    analysed by analyse_subregion in a separate process (*nprocs* workers).
    Returns a dict subregion category -> numpy array of filtered rows.
    """
    gcore.run_command("r.clump", input=development_diff, output=tmp_clump,
                      overwrite=True, quiet=True)
    cats = (gcore.read_command("r.describe", flags="1n", map=subregions,
                               quiet=True).strip().splitlines())
    params = []
    toremove = []
    for cat in cats:
        # each worker writes to its own randomly-suffixed temporary raster
        tmp_clump_cat = append_random(tmp_name, suffix_length=8)
        toremove.append(tmp_clump_cat)
        params.append((tmp_clump_cat, subregions, cat, tmp_clump, threshold))
    with Pool(processes=nprocs) as pool:
        results = pool.map_async(analyse_subregion, params).get()
    # results come back in the order of params, i.e. of cats
    subregions_data = dict(zip(cats, results))
    gcore.run_command("g.remove", type="raster", flags="f", name=toremove,
                      quiet=True)
    return subregions_data
def LoadData(self):
    """Load data into list

    Populates the mapset list control with all mapsets of the current
    location, their owners, and checks the mapsets in the search path.
    NOTE: this is Python 2 / wxPython-era code (sys.maxint).
    """
    self.InsertColumn(0, 'Mapset')
    self.InsertColumn(1, 'Owner')
    ### self.InsertColumn(2, 'Group')
    gisenv = grass.gisenv()
    locationPath = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'])
    self.curr_mapset = gisenv['MAPSET']

    # all mapsets in the location
    ret = grass.read_command('g.mapsets', quiet=True, flags='l',
                             fs='newline')
    self.mapsets = []
    if ret:
        self.mapsets = ret.splitlines()

    # mapsets currently in the search path (i.e. accessible)
    ret = grass.read_command('g.mapsets', quiet=True, flags='p',
                             fs='newline')
    mapsets_access = []
    if ret:
        mapsets_access = ret.splitlines()

    for mapset in self.mapsets:
        # sys.maxint appends at the end of the list (Python 2 only)
        index = self.InsertStringItem(sys.maxint, mapset)
        mapsetPath = os.path.join(locationPath, mapset)
        stat_info = os.stat(mapsetPath)
        if os.name in ('posix', 'mac'):
            self.SetStringItem(index, 1, "%s" % pwd.getpwuid(stat_info.st_uid)[0])
            # FIXME: get group name
            ### self.SetStringItem(index, 2, "%-8s" % stat_info.st_gid)
        else:
            # FIXME: no pwd under MS Windows (owner: 0, group: 0)
            self.SetStringItem(index, 1, "%-8s" % stat_info.st_uid)
            ### self.SetStringItem(index, 2, "%-8s" % stat_info.st_gid)
        if mapset in mapsets_access:
            # tick mapsets that are in the search path
            self.CheckItem(self.mapsets.index(mapset), True)

    self.SetColumnWidth(col=0, width=wx.LIST_AUTOSIZE)
    self.SetColumnWidth(col=1, width=wx.LIST_AUTOSIZE)
def testLabels(self):
    """Test type of resulting map"""
    self.tmpFile.write(input1)
    self.tmpFile.close()
    self.assertModule('r.in.poly', input=self.tmpFile.name,
                      output=self.rinpoly, type='DCELL')
    expected = "-8\t{newline}3\tlabel2{newline}10.01\tlabel1".format(
        newline=os.linesep)
    actual = read_command('r.category', map=self.rinpoly,
                          values=[-8, 3, 10.01]).strip()
    self.assertEqual(first=expected, second=actual,
                     msg="Labels do not match")
def test_connect(self):
    """Test"""
    self.assertModule('v.net', operation='connect', threshold=1000,
                      input='streets', points='schools',
                      output=self.network)
    expected_topology = dict(points=167, nodes=42136, lines=50069)
    self.assertVectorFitsTopoInfo(vector=self.network,
                                  reference=expected_topology)
    layer_listing = read_command('v.category', input=self.network,
                                 option='layers').strip()
    self.assertEqual(first="1\n2", second=layer_listing,
                     msg="Layers do not match")
def list_layers(dsn):
    """Return the layer names available in *dsn*; exit(1) on failure."""
    layer_listing = grass.read_command('v.external', flags='l', input=dsn)
    if not layer_listing:
        sys.exit(1)
    return layer_listing.splitlines()
def _manpage_search(pattern, name):
    """Return True if *pattern* occurs in the man page of module *name*.

    The man page text is lowercased before matching; *pattern* is used
    as-is (callers pass lowercase patterns).
    """
    try:
        manpage = grass.read_command('g.manual', flags='m', entry=name)
    except CalledModuleError:
        # in case man page is missing
        return False
    # idiomatic membership test instead of find(...) > -1
    return pattern in manpage.lower()
def _getGDALFormats():
    """Get dictionary of available GDAL drivers

    Returns a (all_formats, writable_formats) tuple. When r.in.gdal cannot
    be run, both are parsed from None (empty driver lists).
    """
    try:
        ret = grass.read_command("r.in.gdal", quiet=True, flags="f")
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior
        ret = None
    return _parseFormats(ret), _parseFormats(ret, writableOnly=True)
def SetGeorefAndProj(self):
    """Set georeference and projection to target file"""
    # copy the location's projection (WKT) and the stored geotransform
    proj_wkt = grass.read_command('g.proj', flags='wf')
    self.tDataset.SetProjection(proj_wkt)
    self.tDataset.SetGeoTransform(self.tGeotransform)
def subsurface_borehole(points, voxel, new, size, offset, axes, unit, env):
    """Render boreholes of *voxel* at every point of *points*."""
    coordinates = gcore.read_command('v.out.ascii', input=points,
                                     format='point', separator=',',
                                     env=env).strip()
    coords_list = []
    for coords in coordinates.split(os.linesep):
        # keep x,y of each point; drop category/z columns
        coords_list.extend(coords.split(',')[:2])
    gcore.run_command('r3.borehole', overwrite=True, input=voxel, output=new,
                      coordinates=','.join(coords_list), size=size,
                      offset_size=offset, axes=axes, unit=unit, env=env)
def trail_salesman(trails, points, output, env):
    """Compute a travelling-salesman route along *trails* through *points*."""
    net_tmp = 'net_tmp'
    # attach points within 10 map units to the trail network (node layer 2)
    gcore.run_command('v.net', input=trails, points=points, output=net_tmp,
                      operation='connect', threshold=10, overwrite=True,
                      env=env)
    # categories of the connected points = cities to visit
    cats = gcore.read_command('v.category', input=net_tmp, layer=2,
                              option='print',
                              env=env).strip().split(os.linesep)
    gcore.run_command('v.net.salesman', input=net_tmp, output=output,
                      ccats=','.join(cats), alayer=1, nlayer=2,
                      overwrite=True, env=env)
def test_cats2(self):
    """Testing cats=2 """
    self.assertModule('v.extract', flags='d', cats="1,2,3,4,5",
                      input=self.geology, output=self.output)
    table = read_command('v.db.select', map=self.output, separator='pipe')
    self.assertEqual(first=TABLE_2.replace('\n', os.linesep),
                     second=table,
                     msg="Attribute table has wrong entries")
def _getOGRFormats():
    """Get dictionary of available OGR drivers

    Returns a (all_formats, writable_formats) tuple. When v.in.ogr cannot
    be run, both are parsed from None (empty driver lists).
    """
    try:
        ret = grass.read_command('v.in.ogr', quiet=True, flags='f')
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior
        ret = None
    return _parseFormats(ret), _parseFormats(ret, writableOnly=True)
def test_formats_random(self):
    """r.recode should read rules3 values as floats (more than 2 cats)."""
    recode = SimpleModule('r.recode', input='random01', output='recoded',
                          rules='-', overwrite=True)
    recode.inputs.stdin = rules3
    self.assertModule(recode)
    cat_listing = read_command('r.category', map='recoded')
    category_count = len(cat_listing.strip().split('\n'))
    if category_count <= 2:
        self.fail(msg="Number of categories is <= 2 "
                      "which suggests input map values were read as integers.")
def main():
    """Compute the Optimum Index Factor (OIF) for all band combinations.

    Prints the ranked results to stdout and writes them to the file
    'i.oif.result' in the current directory.
    """
    shell = flags['g']
    image = {}
    for band in bands:
        image[band] = options['image%d' % band]

    # calculate the Stddev for TM bands
    grass.message(_("Calculating Standard deviations for all bands..."))
    stddev = {}
    for band in bands:
        grass.verbose("band %d" % band)
        s = grass.read_command('r.univar', flags='g', map=image[band])
        kv = grass.parse_key_val(s)
        stddev[band] = float(kv['stddev'])

    grass.message(_("Calculating Correlation Matrix..."))
    correlation = {}
    s = grass.read_command('r.covar', flags='r',
                           map=[image[band] for band in bands])
    # r.covar prints one row per band; cells are space-separated
    for i, row in zip(bands, s.splitlines()):
        for j, cell in zip(bands, row.split(' ')):
            correlation[i, j] = float(cell)

    # Calculate all combinations
    grass.message(_("Calculating OIF for the 20 band combinations..."))
    oif = []
    for p in perms():
        oif.append((oifcalc(stddev, correlation, *p), p))
    oif.sort(reverse=True)

    grass.verbose(_("The Optimum Index Factor analysis result "
                    "(Best combination comes first):"))

    if shell:
        fmt = "%d%d%d:%f\n"
    else:
        fmt = "%d%d%d: %f\n"

    # `file()` is Python-2-only and left the handle open on error;
    # open() + `with` works on both and always closes the file
    with open('i.oif.result', 'w') as outf:
        for v, p in oif:
            sys.stdout.write(fmt % (p + (v,)))
            outf.write(fmt % (p + (v,)))
def get_region():
    """Returns current computational region as dictionary.

    Uses standardized key names. Outputs only 2D region values which are
    usable for conversion to another location.
    """
    raw = gcore.read_command('g.region', flags='pg')
    parsed = gcore.parse_key_val(raw, sep='=')
    # map standardized output keys to g.region's short keys
    key_map = (('east', 'e'), ('north', 'n'), ('west', 'w'),
               ('south', 's'), ('rows', 'rows'), ('cols', 'cols'))
    return {out_key: parsed[in_key] for out_key, in_key in key_map}
def OnGroupSelection(self, event):
    """!Handler for selecting imagery group

    Fills self.rasterList with the maps of the selected group.
    The original compared the split() result (a list) against None and ''
    -- both comparisons could never restrict anything, and empty output
    produced a bogus [''] list; now an empty listing leaves the list empty.
    """
    self.rasterList = []
    self.group = event.GetString()

    ret = grass.read_command('i.group',
                             group='%s' % self.group,
                             quiet=True, flags='g').strip()
    if ret:
        self.rasterList = ret.split('\n')
def obtainCategories(vector, layer='1'):
    """This function returns a list of categories for all areas in the given layer"""
    listing = grass.read_command('v.category', input=vector, layer=layer,
                                 option='print', type='centroid')
    unique_cats = set()
    for row in listing.splitlines():
        # multiple categories on one centroid are '/'-separated
        unique_cats.update(int(c) for c in row.split('/'))
    return sorted(unique_cats)
def viewshed(scanned_elev, output, vector, visible_color, invisible_color,
             obs_elev=1.7, env=None):
    """Compute a binary viewshed from the first point of *vector*.

    Visible cells are colored *visible_color*, invisible ones
    *invisible_color*. Nothing is computed when the vector has no points.
    """
    coordinates = gcore.read_command('v.out.ascii', input=vector,
                                     separator=',', env=env).strip()
    coordinate = None
    for line in coordinates.split(os.linesep):
        try:
            coordinate = [float(c) for c in line.split(',')[0:2]]
        except ValueError:
            # no points in map
            pass
        # only the first observer point is used
        break
    if coordinate:
        gcore.run_command('r.viewshed', flags='b', input=scanned_elev,
                          output=output, coordinates=coordinate,
                          observer_elevation=obs_elev, env=env)
        # make out-of-sight cells 0 so the color rule covers them
        gcore.run_command('r.null', map=output, null=0, env=env)
        gcore.write_command('r.colors', map=output, rules='-',
                            stdin='0 {invis}\n1 {vis}'.format(
                                vis=visible_color, invis=invisible_color),
                            env=env)
def test_no_text_delimeter(self):
    """Test type of resulting map"""
    column_spec = ("cat int, x double, y double,"
                   " ed_cat varchar(20), field_estimate varchar(20)")
    self.assertModule(
        'v.in.ascii', input='-', stdin_=INPUT_NOQUOTES,
        output=self.xyvector, separator='comma',
        skip=1, cat=1, x=2, y=3, columns=column_spec)
    table = read_command('v.db.select', map=self.xyvector, separator='pipe')
    self.assertEqual(first=TABLE_1.replace('\n', os.linesep),
                     second=table,
                     msg="Attribute table has wrong entries")
def _getAddons():
    """Return the sorted list of addon executables known to g.extension."""
    try:
        output = gcore.read_command("g.extension", quiet=True, flags="ag")
    except CalledModuleError:
        _warning(_("List of addons cannot be obtained"
                   " because g.extension failed."))
        return []
    executables = []
    for line in output.splitlines():
        # only 'executables=...' lines carry the comma-separated names
        if line.startswith("executables"):
            executables.extend(line.split("=", 1)[1].split(","))
    return sorted(executables)
def test_uncommon_delims(self):
    """Test loading CSV with uncommon delimiters"""
    columns = ("cat int, x double, y double,"
               " ed_cat varchar(20), field_estimate varchar(20)")
    self.assertModule(
        'v.in.ascii', input='-', output=self.xyvector,
        separator='@', text='^', skip=1, x=2, y=3, cat=1,
        columns=columns, stdin_=INPUT_UNCOMMON)
    table = read_command('v.db.select', map=self.xyvector,
                         separator='pipe')
    self.assertEqual(first=TABLE_1.replace('\n', os.linesep),
                     second=table,
                     msg="Attribute table has wrong entries")
def change_detection(before, after, change, height_threshold, cells_threshold,
                     add, max_detected, debug, env):
    """Detect changed areas between two elevation rasters.

    Fits a regression 'before = a + b * after' to remove systematic
    offset/scale, thresholds the residual by *height_threshold*, clumps
    the result and keeps up to *max_detected* clumps whose cell count is
    within *cells_threshold*; centroids of those clumps are written to
    the vector map *change*. Temporary rasters are always removed.

    :param add: if True detect added material (after > before) within a
                (min, max) height range; otherwise detect removed
                material above a single height threshold
    """
    diff_thr = 'diff_thr_' + str(uuid.uuid4()).replace('-', '')
    diff_thr_clump = 'diff_thr_clump_' + str(uuid.uuid4()).replace('-', '')

    coeff = gcore.parse_command('r.regression.line', mapx=after, mapy=before,
                                flags='g', env=env)
    grast.mapcalc('diff = {a} + {b} * {after} - {before}'.format(
        a=coeff['a'], b=coeff['b'], before=before, after=after), env=env)
    try:
        if add:
            grast.mapcalc(
                "{diff_thr} = if(({a} + {b} * {after} - {before}) > {thr1} &&"
                " ({a} + {b} * {after} - {before}) < {thr2}, 1, null())".format(
                    a=coeff['a'], b=coeff['b'], diff_thr=diff_thr,
                    after=after, before=before,
                    thr1=height_threshold[0], thr2=height_threshold[1]),
                env=env)
        else:
            # parenthesize the regression prediction: before - (a + b*after);
            # without the parentheses the expression evaluated
            # before - a + b*after, inconsistent with the 'add' branch
            grast.mapcalc(
                "{diff_thr} = if(({before} - ({a} + {b} * {after})) > {thr}, 1, null())".format(
                    diff_thr=diff_thr, a=coeff['a'], b=coeff['b'],
                    after=after, before=before, thr=height_threshold),
                env=env)
        gcore.run_command('r.clump', input=diff_thr, output=diff_thr_clump,
                          env=env)
        # 'r.stats -cn sort=desc': "category count" pairs, largest clump first
        stats = gcore.read_command('r.stats', flags='cn', input=diff_thr_clump,
                                   sort='desc', env=env).strip().splitlines()
        if debug:
            print('DEBUG: {}'.format(stats))
        cats = []
        if stats and stats[0]:
            found = 0
            for stat in stats:
                if found >= max_detected:
                    break
                cat, count = stat.split()
                # keep clumps whose size is within the allowed cell range
                if cells_threshold[0] < float(count) < cells_threshold[1]:
                    found += 1
                    cats.append(cat)
        if cats:
            rules = ['{c}:{c}:1'.format(c=c) for c in cats]
            gcore.write_command('r.recode', input=diff_thr_clump, output=change,
                                rules='-', stdin='\n'.join(rules), env=env)
            gcore.run_command('r.volume', flags='f', input=change,
                              clump=diff_thr_clump, centroids=change, env=env)
        else:
            # no clump qualified (or none found at all): create an empty map
            gcore.warning("No change found!")
            gcore.run_command('v.edit', map=change, tool='create', env=env)
    finally:
        # always clean up temporaries; unlike a bare 'except', this does
        # not swallow errors from the steps above
        gcore.run_command('g.remove', flags='f', type=['raster'],
                          name=[diff_thr, diff_thr_clump], env=env)
def change_detection(before, after, change, height_threshold, cells_threshold,
                     add, max_detected, env):
    """Detect changed areas between two elevation rasters.

    Thresholds the raw difference of *after* and *before* by
    *height_threshold*, clumps the result and keeps up to *max_detected*
    clumps whose cell count is within *cells_threshold*; centroids of
    those clumps are written to the vector map *change*. Temporary maps
    are removed before returning.

    :param add: if True detect added material (after > before) within a
                (min, max) height range; otherwise detect removed
                material above a single height threshold
    """
    diff_thr = 'diff_thr_' + str(uuid.uuid4()).replace('-', '')
    diff_thr_clump = 'diff_thr_clump_' + str(uuid.uuid4()).replace('-', '')
    change_vector = 'change_vector_' + str(uuid.uuid4()).replace('-', '')
    if add:
        gcore.run_command('r.mapcalc',
                          expression="{diff_thr} = if(({after} - {before}) > {thr1} &&"
                          " ({after} - {before}) < {thr2}, 1, null())".format(
                              diff_thr=diff_thr, after=after, before=before,
                              thr1=height_threshold[0],
                              thr2=height_threshold[1]),
                          env=env)
    else:
        gcore.run_command('r.mapcalc',
                          expression="{diff_thr} = if(({before} - {after}) > {thr}, 1, null())".format(
                              diff_thr=diff_thr, after=after, before=before,
                              thr=height_threshold),
                          env=env)
    gcore.run_command('r.clump', input=diff_thr, output=diff_thr_clump, env=env)
    # splitlines() instead of split(os.linesep): module output is
    # '\n'-separated regardless of platform, so splitting on os.linesep
    # ('\r\n' on Windows) would fail there
    stats = gcore.read_command('r.stats', flags='cn', input=diff_thr_clump,
                               sort='desc', env=env).strip().splitlines()
    if len(stats) > 0 and stats[0]:
        print(stats)
        cats = []
        found = 0
        for stat in stats:
            if found >= max_detected:
                break
            cat, count = stat.split()
            # keep clumps whose size is within the allowed cell range
            if cells_threshold[0] < float(count) < cells_threshold[1]:
                found += 1
                cats.append(cat)
        if cats:
            expression = '{change} = if(('.format(change=change)
            for i, cat in enumerate(cats):
                if i != 0:
                    expression += ' || '
                expression += '{diff_thr_clump} == {val}'.format(
                    diff_thr_clump=diff_thr_clump, val=cat)
            expression += '), 1, null())'
            gcore.run_command('r.mapcalc', overwrite=True, env=env,
                              expression=expression)
            gcore.run_command('r.to.vect', flags='st', input=change,
                              output=change_vector, type='area', env=env)
            gcore.run_command('v.to.points', flags='t', input=change_vector,
                              type='centroid', output=change, env=env)
            # the intermediate area vector was leaked by the original code
            gcore.run_command('g.remove', flags='f', type='vector',
                              name=change_vector, env=env)
        else:
            gcore.warning("No change found!")
            gcore.run_command('v.edit', map=change, tool='create', env=env)
    else:
        gcore.warning("No change found!")
        gcore.run_command('v.edit', map=change, tool='create', env=env)
    gcore.run_command('g.remove', flags='f', type='raster',
                      name=[diff_thr, diff_thr_clump], env=env)