def _createVRT(self):
    '''!Create a VRT with the help of the gdalbuildvrt program

    VRT is a virtual GDAL dataset format

    @return path to VRT file
    '''
    self._debug("_createVRT", "started")
    vrt_file = self._tempfile()
    command = ["gdalbuildvrt", '-te']
    command += self.params['boundingbox']
    command += [vrt_file, self.xml_file]
    command = [str(i) for i in command]

    grass.verbose(' '.join(command))
    self.process = subprocess.Popen(command, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
    self.out, self.err = self.process.communicate()
    self.out, self.err = grass.decode(self.out), grass.decode(self.err)
    grass.verbose(self.out)

    if self.err:
        grass.verbose(self.err + "\n")
        if "does not exist" in self.err:
            grass.warning('Coverage "%s" cannot be opened / does not exist.'
                          % self.params['coverage'])
        grass.fatal("Generation of VRT-File failed (gdalbuildvrt ERROR). "
                    "Set verbose-flag for details.")

    self._debug("_createVRT", "finished")
    return vrt_file
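# For reference, the command assembled above expands to something like the
# following (the bounding box values are hypothetical; gdalbuildvrt expects
# -te as xmin ymin xmax ymax):
#
#   gdalbuildvrt -te 630000 215000 645000 228500 /tmp/tmp_xyz.vrt service.xml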
def read_bmp_header(self, header):
    magic, bmfh, bmih = struct.unpack("2s12s40s10x", header)

    if grass.decode(magic) != 'BM':
        raise SyntaxError("Invalid magic number")

    size, res1, res2, hsize = struct.unpack("<IHHI", bmfh)
    if hsize != self.HEADER_SIZE:
        raise SyntaxError("Invalid file header size")

    (hsize, width, height, planes, bpp, compression,
     imsize, xppm, yppm, cused, cimp) = struct.unpack("<IiiHHIIiiII", bmih)

    if hsize != 40:
        raise SyntaxError("Invalid info header size")

    self.i_width = width
    self.i_height = -height

    if planes != 1:
        raise SyntaxError("Planar data not supported")
    if bpp != 32:
        raise SyntaxError("Only 32-BPP images supported")
    if compression != 0:
        raise SyntaxError("Compression not supported")
    if imsize != self.i_width * self.i_height * 4:
        raise SyntaxError("Invalid image data size")
    if size != self.HEADER_SIZE + self.i_width * self.i_height * 4:
        raise SyntaxError("Invalid image size")
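# A minimal sketch (not from the source) of a header that read_bmp_header()
# above would accept, assuming HEADER_SIZE == 64 (the 54-byte standard BMP
# header plus 10 bytes of padding, matching the "10x" in the unpack format).
# The height is stored negated, which marks a top-down bitmap; the reader
# flips the sign back.
import struct

def make_bmp_header(width, height, header_size=64):
    imsize = width * height * 4  # 32-BPP pixel data size
    # file header fields after the "BM" magic: size, res1, res2, data offset
    bmfh = struct.pack("<IHHI", header_size + imsize, 0, 0, header_size)
    # 40-byte info header: size, width, height, planes, bpp, compression, ...
    bmih = struct.pack("<IiiHHIIiiII", 40, width, -height, 1, 32, 0,
                       imsize, 0, 0, 0, 0)
    return b"BM" + bmfh + bmih + b"\x00" * 10  # 64 bytes total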
def reproject_region(region, from_proj, to_proj):
    region = region.copy()
    proj_input = '{east} {north}\n{west} {south}'.format(**region)
    proc = gs.start_command('m.proj', input='-', separator=' , ',
                            proj_in=from_proj, proj_out=to_proj,
                            stdin=gs.PIPE, stdout=gs.PIPE, stderr=gs.PIPE)
    proc.stdin.write(gs.encode(proj_input))
    proc.stdin.close()
    proc.stdin = None
    proj_output, stderr = proc.communicate()
    if proc.returncode:
        raise RuntimeError("reprojecting region: m.proj error: "
                           + gs.decode(stderr))
    enws = gs.decode(proj_output).split(os.linesep)
    elon, nlat, unused = enws[0].split(' ')
    wlon, slat, unused = enws[1].split(' ')
    region['east'] = elon
    region['north'] = nlat
    region['west'] = wlon
    region['south'] = slat
    return region
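# Hedged usage sketch for reproject_region(): the region dict must carry the
# long key names (east/north/west/south). get_location_proj_string is the
# companion helper named in the later variant's docstring; calling it with
# no arguments is an assumption made here for illustration.
region = {"east": 645000, "north": 228500, "west": 630000, "south": 215000}
ll_region = reproject_region(region,
                             from_proj=get_location_proj_string(),
                             to_proj="+proj=longlat +datum=WGS84")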
def test_r_slope_aspect_json(self):
    args = ["r.slope.aspect",
            "elevation=elevation+https://storage.googleapis.com/graas-geodata/elev_ned_30m.tif",
            "slope=slope+GTiff", "aspect=aspect+GTiff", "--json"]

    inputs = [
        {"import_descr": {"source": "https://storage.googleapis.com/graas-geodata/elev_ned_30m.tif",
                          "type": "raster"},
         "param": "elevation", "value": "elevation"},
        {"param": "format", "value": "degrees"},
        {"param": "precision", "value": "FCELL"},
        {"param": "zscale", "value": "1.0"},
        {"param": "min_slope", "value": "0.0"},
    ]
    outputs = [
        {"export": {"format": "GTiff", "type": "raster"},
         "param": "slope", "value": "slope"},
        {"export": {"format": "GTiff", "type": "raster"},
         "param": "aspect", "value": "aspect"},
    ]

    stdout, stderr = subprocess.Popen(args, stdout=subprocess.PIPE).communicate()
    print(stdout)
    json_code = json.loads(decode(stdout))
    self.assertEqual(json_code["module"], "r.slope.aspect")
    self.assertEqual(len(json_code["inputs"]), 5)
    self.assertEqual(json_code["inputs"], inputs)
    self.assertEqual(json_code["outputs"], outputs)
def test_v_out_ascii(self):
    args = ["v.out.ascii", "input=hospitals@PERMANENT", "output=myfile+TXT",
            "--json"]

    inputs = [
        {"param": "input", "value": "hospitals@PERMANENT"},
        {"param": "layer", "value": "1"},
        {"param": "type", "value": "point,line,boundary,centroid,area,face,kernel"},
        {"param": "format", "value": "point"},
        {"param": "separator", "value": "pipe"},
        {"param": "precision", "value": "8"},
    ]
    outputs = [
        {"export": {"format": "TXT", "type": "file"},
         "param": "output", "value": "$file::myfile"},
    ]

    stdout, stderr = subprocess.Popen(args, stdout=subprocess.PIPE).communicate()
    print(stdout)
    json_code = json.loads(decode(stdout))
    self.assertEqual(json_code["module"], "v.out.ascii")
    self.assertEqual(len(json_code["inputs"]), 6)
    self.assertEqual(json_code["inputs"], inputs)
    self.assertEqual(json_code["outputs"], outputs)
def read_header(self, infile):
    wind = {}
    for i in range(6):
        line = grass.decode(infile.readline()).rstrip("\r\n")
        f = line.split(":")
        key = f[0]
        val = f[1].strip()
        (k, f) = wind_keys[key]
        wind[k] = (f)(val)
    return wind
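# The 6-line "key: value" header read_header() expects matches what
# r.out.ascii writes, e.g. "north: 228500". A plausible wind_keys table
# (assumed here, it is not shown in the source) maps each header key to a
# region-dict key plus a conversion function:
wind_keys = {
    "north": ("n", float),
    "south": ("s", float),
    "east": ("e", float),
    "west": ("w", float),
    "rows": ("rows", int),
    "cols": ("cols", int),
}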
def copy_colors(fh, map, offset):
    p = gscript.pipe_command("r.colors.out", map=map)
    for line in p.stdout:
        f = gscript.decode(line).rstrip("\r\n").split(" ")
        if offset:
            if f[0] in ["nv", "default"]:
                continue
            f[0] = str(float(f[0]) + offset)
        fh.write(gscript.encode(" ".join(f) + "\n"))
    p.wait()
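# Hedged usage sketch for copy_colors(): the file handle must be opened in
# binary mode because the function writes encoded bytes; the map name and
# offset are illustrative.
with open("rules.txt", "wb") as fh:
    copy_colors(fh, "elevation", offset=100)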
def SetFromcStatistics(self, cStatistics):
    """Sets all statistical values.

    Copies all statistic values from \a cStatistics.

    :param cStatistics: pointer to C statistics structure
    """
    cat = c_int()
    set_stats = {}
    I_iclass_statistics_get_cat(cStatistics, byref(cat))
    if self.category != cat.value:
        set_stats["category"] = cat.value

    name = c_char_p()
    I_iclass_statistics_get_name(cStatistics, byref(name))
    if self.name != name.value:
        set_stats["name"] = grass.decode(name.value)

    color = c_char_p()
    I_iclass_statistics_get_color(cStatistics, byref(color))
    if self.color != color.value:
        set_stats["color"] = grass.decode(color.value)

    nbands = c_int()
    I_iclass_statistics_get_nbands(cStatistics, byref(nbands))
    if self.nbands != nbands.value:
        set_stats["nbands"] = nbands.value

    ncells = c_int()
    I_iclass_statistics_get_ncells(cStatistics, byref(ncells))
    if self.ncells != ncells.value:
        set_stats["ncells"] = ncells.value

    nstd = c_float()
    I_iclass_statistics_get_nstd(cStatistics, byref(nstd))
    if self.nstd != nstd.value:
        set_stats["nstd"] = nstd.value

    self.SetStatistics(set_stats)
    self.SetBandStatistics(cStatistics)
def test_v_info(self):
    args = ["v.info", "map=hospitals@PERMANENT", "-c", "--json"]

    inputs = [
        {"param": "map", "value": "hospitals@PERMANENT"},
        {"param": "layer", "value": "1"},
    ]

    stdout, stderr = subprocess.Popen(args, stdout=subprocess.PIPE).communicate()
    print(stdout)
    json_code = json.loads(decode(stdout))
    self.assertEqual(json_code["module"], "v.info")
    self.assertEqual(len(json_code["inputs"]), 2)
    self.assertEqual(json_code["inputs"], inputs)
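# Abridged shape of the --json document the assertions above imply; this is
# read off the expected values in the test, not taken from a real run.
expected = {
    "module": "v.info",
    "inputs": [
        {"param": "map", "value": "hospitals@PERMANENT"},
        {"param": "layer", "value": "1"},
    ],
}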
def _fetchCapabilities(self, options):
    """!Download capabilities from WMS server"""
    cap_url = options["url"].strip()

    if "?" in cap_url:
        cap_url += "&"
    else:
        cap_url += "?"

    if "WMTS" in options["driver"]:
        cap_url += "SERVICE=WMTS&REQUEST=GetCapabilities&VERSION=1.0.0"
    elif "OnEarth" in options["driver"]:
        cap_url += "REQUEST=GetTileService"
    else:
        cap_url += (
            "SERVICE=WMS&REQUEST=GetCapabilities&VERSION=" + options["wms_version"]
        )

    if options["urlparams"]:
        cap_url += "&" + options["urlparams"]

    grass.debug("Fetching capabilities file.\n%s" % cap_url)

    try:
        cap = self._fetchDataFromServer(
            cap_url, options["username"], options["password"]
        )
    except (IOError, HTTPException) as e:
        if isinstance(e, HTTPError) and e.code == 401:
            grass.fatal(
                _("Authorization failed to <%s> when fetching capabilities")
                % options["url"]
            )
        else:
            msg = _("Unable to fetch capabilities from <{0}>. Reason: ").format(
                options["url"]
            )
            if hasattr(e, "reason"):
                msg += "{0}".format(e.reason)
            else:
                msg += "{0}".format(e)
            grass.fatal(msg)

    grass.debug("Fetching capabilities OK")
    return grass.decode(cap.read())
def proj_to_wgs84(region):
    proj_in = '{east} {north}\n{west} {south}'.format(**region)
    proc = gs.start_command('m.proj', input='-', separator=' , ',
                            flags='od', stdin=gs.PIPE, stdout=gs.PIPE,
                            stderr=gs.PIPE)
    proc.stdin.write(gs.encode(proj_in))
    proc.stdin.close()
    proc.stdin = None
    proj_out, errors = proc.communicate()
    if proc.returncode:
        raise RuntimeError("m.proj error: %s" % errors)
    enws = gs.decode(proj_out).split(os.linesep)
    elon, nlat, unused = enws[0].split(' ')
    wlon, slat, unused = enws[1].split(' ')
    return {'east': elon, 'north': nlat, 'west': wlon, 'south': slat}
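# Hedged usage sketch for proj_to_wgs84(): the dict must carry the long
# key names the format string expects; the values are illustrative
# projected coordinates.
bbox = proj_to_wgs84({"east": 645000, "north": 228500,
                      "west": 630000, "south": 215000})
print(bbox["west"], bbox["south"], bbox["east"], bbox["north"])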
def _validate_xml(self, xml, xsd):
    """Validate xml file against xsd schema

    :param str xml: xml file path
    :param str xsd: xsd file path
    """
    _xsd = etree.parse(xsd)
    xsd_schema = etree.XMLSchema(_xsd)
    if not xsd_schema.validate(etree.parse(xml)):
        gscript.fatal(
            _(
                "Connections resources xml file '{xml}' "
                "is not valid.\n\n{xsd_schema}".format(
                    xml=xml,
                    xsd_schema=gscript.decode(
                        etree.tostring(_xsd, pretty_print=True),
                    ),
                ),
            ),
        )
def reproject_region(region, from_proj, to_proj):
    """Reproject boundary of region from one projection to another.

    :param dict region: region to reproject as a dictionary with long key names
                        output of get_region
    :param str from_proj: PROJ.4 string of region; output of get_location_proj_string
    :param str to_proj: PROJ.4 string of target location; output of get_location_proj_string

    :return dict region: reprojected region as a dictionary with long key names
    """
    region = region.copy()
    proj_input = (
        f"{region['east']} {region['north']}\n{region['west']} {region['south']}"
    )
    proc = gs.start_command(
        "m.proj",
        input="-",
        separator=" , ",
        proj_in=from_proj,
        proj_out=to_proj,
        flags="d",
        stdin=gs.PIPE,
        stdout=gs.PIPE,
        stderr=gs.PIPE,
    )
    proc.stdin.write(gs.encode(proj_input))
    proc.stdin.close()
    proc.stdin = None
    proj_output, stderr = proc.communicate()
    if proc.returncode:
        raise RuntimeError(
            _("Encountered error while running m.proj: {}").format(stderr))
    enws = gs.decode(proj_output).split(os.linesep)
    elon, nlat, unused = enws[0].split(" ")
    wlon, slat, unused = enws[1].split(" ")
    region["east"] = elon
    region["north"] = nlat
    region["west"] = wlon
    region["south"] = slat
    return region
def matchhist(original, target, matched):
    # pan/intensity histogram matching using numpy arrays
    grass.message(_("Histogram matching..."))

    # input images
    original = original.split("@")[0]
    target = target.split("@")[0]
    images = [original, target]

    # create a dictionary to hold arrays for each image
    arrays = {}

    for img in images:
        # calculate number of cells for each grey value for each image
        stats_out = grass.pipe_command("r.stats", flags="cin", input=img, sep=":")
        stats = grass.decode(stats_out.communicate()[0]).split("\n")[:-1]
        stats_dict = dict(s.split(":", 1) for s in stats)
        total_cells = 0  # total non-null cells
        for j in stats_dict:
            stats_dict[j] = int(stats_dict[j])
            if j != "*":
                total_cells += stats_dict[j]

        if total_cells < 1:
            grass.fatal(_("Input has no data. Check region settings."))

        # Make a 2x256 structured array for each image with a
        # cumulative distribution function (CDF) for each grey value.
        # Grey value is the integer (i4) and cdf is float (f4).
        arrays[img] = np.zeros((256,), dtype=("i4,f4"))
        # cumulative total of cells for sum of current and all lower grey values
        cum_cells = 0
        for n in range(0, 256):
            if str(n) in stats_dict:
                num_cells = stats_dict[str(n)]
            else:
                num_cells = 0
            cum_cells += num_cells
            # cdf is the number of cells at or below a given grey value
            # divided by the total number of cells
            cdf = float(cum_cells) / float(total_cells)
            # insert values into array
            arrays[img][n] = (n, cdf)

    # open file for reclass rules
    outfile = open(grass.tempfile(), "w")

    for i in arrays[original]:
        # for each grey value and corresponding cdf value in original, find the
        # cdf value in target that is closest to the original cdf value
        difference_list = []
        for j in arrays[target]:
            # make a list of the difference between each original cdf value and
            # the target cdf value
            difference_list.append(abs(i[1] - j[1]))
        # get the smallest difference in the list
        min_difference = min(difference_list)
        for j in arrays[target]:
            # find the grey value in target that corresponds to the cdf
            # closest to the original cdf
            if j[1] <= i[1] + min_difference and j[1] >= i[1] - min_difference:
                # build a reclass rules file from the original grey value and
                # corresponding grey value from target
                out_line = "%d = %d\n" % (i[0], j[0])
                outfile.write(out_line)
                break

    outfile.close()

    # create reclass of target from reclass rules file
    result = grass.core.find_file(matched, element="cell")
    if result["fullname"]:
        grass.run_command("g.remove", flags="f", quiet=True, type="raster",
                          name=matched)
        grass.run_command("r.reclass", input=original, out=matched,
                          rules=outfile.name)
    else:
        grass.run_command("r.reclass", input=original, out=matched,
                          rules=outfile.name)

    # Cleanup: remove the rules file
    grass.try_remove(outfile.name)

    # return reclass of target with histogram that matches original
    return matched
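# A pure-numpy sketch (an assumption, not the module's code) of the same
# CDF-matching idea without GRASS: build a 256-entry lookup table mapping
# each grey value of src to the grey value of ref whose CDF is nearest.
import numpy as np

def match_histogram(src, ref, bins=256):
    src_cdf = np.cumsum(np.bincount(src.ravel(), minlength=bins)) / src.size
    ref_cdf = np.cumsum(np.bincount(ref.ravel(), minlength=bins)) / ref.size
    # index of the first ref CDF value >= each src CDF value; a one-sided
    # approximation of the nearest-CDF match used in matchhist() above
    lut = np.minimum(np.searchsorted(ref_cdf, src_cdf), bins - 1)
    return lut[src].astype(src.dtype)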
def main():
    vinput = options["input"]
    columns = options["columns"].split(",")
    binary = options["developed_column"]
    level = options["subregions_column"]
    sep = gutils.separator(options["separator"])
    minim = int(options["min_variables"])
    dredge = flags["d"]
    if options["max_variables"]:
        maxv = int(options["max_variables"])
    else:
        maxv = len(columns)
    if dredge and minim > maxv:
        gscript.fatal(
            _("Minimum number of predictor variables is larger than maximum number"))

    global TMP_CSV, TMP_RSCRIPT, TMP_POT
    TMP_CSV = gscript.tempfile(create=False) + ".csv"
    TMP_RSCRIPT = gscript.tempfile()
    include_level = True
    distinct = gscript.read_command(
        "v.db.select",
        flags="c",
        map=vinput,
        columns="distinct {level}".format(level=level),
    ).strip()
    if len(distinct.splitlines()) <= 1:
        include_level = False
        single_level = distinct.splitlines()[0]
    with open(TMP_RSCRIPT, "w") as f:
        f.write(rscript)
    TMP_POT = gscript.tempfile(create=False) + "_potential.csv"
    columns += [binary]
    if include_level:
        columns += [level]
    where = "{c} IS NOT NULL".format(c=columns[0])
    for c in columns[1:]:
        where += " AND {c} IS NOT NULL".format(c=c)
    gscript.run_command(
        "v.db.select",
        map=vinput,
        columns=columns,
        separator="comma",
        where=where,
        file=TMP_CSV,
    )

    if dredge:
        gscript.info(_("Running automatic model selection ..."))
    else:
        gscript.info(_("Computing model..."))

    cmd = [
        "Rscript", TMP_RSCRIPT,
        "-i", TMP_CSV,
        "-r", binary,
        "-m", str(minim),
        "-x", str(maxv),
        "-o", TMP_POT,
        "-d", "TRUE" if dredge else "FALSE",
    ]
    if include_level:
        cmd += ["-l", level]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    gscript.warning(gscript.decode(stderr))
    if p.returncode != 0:
        gscript.warning(gscript.decode(stderr))
        gscript.fatal(_("Running R script failed, check messages above"))

    gscript.info(_("Best model summary:"))
    gscript.info("-------------------------")
    gscript.message(gscript.decode(stdout))

    with open(TMP_POT, "r") as fin, open(options["output"], "w") as fout:
        i = 0
        for line in fin.readlines():
            row = line.strip().split("\t")
            row = [each.strip('"') for each in row]
            if i == 0:
                row[0] = "ID"
                row[1] = "Intercept"
            if i == 1 and not include_level:
                row[0] = single_level
            fout.write(sep.join(row))
            fout.write("\n")
            i += 1
def main():
    bboxcrs = options['crs']
    in_n = float(options['n'])
    in_s = float(options['s'])
    in_w = float(options['w'])
    in_e = float(options['e'])
    raster = options['raster']
    strds = options['strds']

    source = osr.SpatialReference()

    # there is no ImportFromAnything fn in GDAL OSR, thus
    # feed crs to projinfo and get WKT
    if not grass.find_program("projinfo"):
        grass.fatal(_("projinfo program not found, install PROJ first: "
                      "https://proj.org"))

    cmd = ["projinfo", "-q", "-o", "WKT2:2019", bboxcrs]
    p = grass.Popen(cmd, stdout=grass.PIPE)
    inwkt = p.communicate()[0]
    inwkt = grass.decode(inwkt)
    source.ImportFromWkt(inwkt)
    # make sure easting is first axis
    source.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    outwkt = grass.read_command('g.proj', flags='w')
    target = osr.SpatialReference()
    target.ImportFromWkt(outwkt)
    # make sure easting is first axis
    target.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    transform = osr.CoordinateTransformation(source, target)

    lower_left = ogr.CreateGeometryFromWkt(f"POINT ({in_w} {in_s})")
    lower_left.Transform(transform)
    upper_right = ogr.CreateGeometryFromWkt(f"POINT ({in_e} {in_n})")
    upper_right.Transform(transform)

    out_w = lower_left.GetPoint()[0]
    out_s = lower_left.GetPoint()[1]
    out_e = upper_right.GetPoint()[0]
    out_n = upper_right.GetPoint()[1]

    # densify the western border and expand the output bounds as needed
    stepsize = (in_n - in_s) / 21.0
    counter = 1
    while counter < 21:
        x = in_w
        y = in_s + counter * stepsize
        border_point = ogr.CreateGeometryFromWkt(f"POINT ({x} {y})")
        border_point.Transform(transform)
        out_x = border_point.GetPoint()[0]
        out_y = border_point.GetPoint()[1]
        if out_w > out_x:
            out_w = out_x
        if out_e < out_x:
            out_e = out_x
        if out_s > out_y:
            out_s = out_y
        if out_n < out_y:
            out_n = out_y
        counter = counter + 1

    # densify the southern border and expand the output bounds as needed
    stepsize = (in_e - in_w) / 21.0
    counter = 1
    while counter < 21:
        x = in_w + counter * stepsize
        y = in_s
        border_point = ogr.CreateGeometryFromWkt(f"POINT ({x} {y})")
        border_point.Transform(transform)
        # extract the transformed coordinates before the comparisons
        out_x = border_point.GetPoint()[0]
        out_y = border_point.GetPoint()[1]
        if out_w > out_x:
            out_w = out_x
        if out_e < out_x:
            out_e = out_x
        if out_s > out_y:
            out_s = out_y
        if out_n < out_y:
            out_n = out_y
        counter = counter + 1

    outflags = ""
    if flags['p']:
        outflags = 'p'
    if flags['g']:
        outflags = 'g'

    if raster:
        grass.run_command('g.region', n=out_n, s=out_s, w=out_w, e=out_e,
                          align=raster, flags=outflags)
    elif strds:
        strds_info = grass.parse_command('t.info', input=strds, flags='g',
                                         delimiter='=')
        res = ((float(strds_info['nsres_min']) +
                float(strds_info['ewres_min'])) / 2.0)
        outflags = outflags + 'a'
        grass.run_command('g.region', n=out_n, s=out_s, w=out_w, e=out_e,
                          res=res, flags=outflags)
    else:
        grass.run_command('g.region', n=out_n, s=out_s, w=out_w, e=out_e,
                          flags=outflags)

    return 0
def main():
    """Do the main work"""
    alias_output = options["alias_output"]

    bgr_mask = options["bgr_mask"]

    null_value = options["null_value"]

    bgr_output = options["bgr_output"]
    species_output = options["species_output"]

    alias, parameters = parse_bgr_input(
        options["alias_input"], options["env_maps"], options["alias_names"]
    )

    species_dict = parse_species_input(
        options["species_masks"], options["species_names"]
    )

    # Check if a mask already exists
    if RasterRow("MASK", Mapset().name).exist():
        gscript.verbose(
            _("A mask already exists. Renaming existing mask to old_MASK...")
        )
        gscript.run_command(
            "g.rename", rast="MASK,{}_MASK".format(TMP_NAME), quiet=True
        )

    # Build parameter header if necessary
    header = ",".join(alias)

    # Write alias output if requested
    if alias_output:
        with open(alias_output, "w") as alias_out:
            for idx, name in enumerate(alias):
                alias_out.write("{},{}\n".format(name, parameters[idx]))

    # Check if species output is requested and produce it
    if species_output and species_dict:
        # Write header to species output SWD file
        species_header = "species,X,Y,{}\n".format(header)

        with open(species_output, "w") as sp_out:
            sp_out.write(species_header)

        # Parse species input variables
        for species in species_dict:
            species_map = species_dict[species]

            # Zoom region to match species map if requested
            if flags["z"]:
                gscript.verbose(
                    _("Zooming region to species {} temporarily.".format(species))
                )
                gscript.use_temp_region()
                gscript.run_command(
                    "g.region", align="@".join(species_map),
                    zoom="@".join(species_map)
                )

            # Apply species mask
            gscript.run_command(
                "r.mask", raster="@".join(species_map), overwrite=True, quiet=True
            )

            # Export data using r.stats
            gscript.verbose(_("Producing output for species {}".format(species)))
            stats = gscript.pipe_command(
                "r.stats",
                flags="1gN",
                verbose=True,
                input=",".join(parameters),
                separator=",",
                null_value=null_value,
            )

            with open(species_output, "a") as sp_out:
                for row in stats.stdout:
                    sp_out.write("{},{}".format(species, gscript.decode(row)))

            # Restore the region if zooming had been requested
            if flags["z"]:
                gscript.del_temp_region()

            # Remove mask
            gscript.run_command("r.mask", flags="r", quiet=True)

    # Write header to background output SWD file
    bgr_header = "bgr,X,Y,{}\n".format(",".join(alias))

    with open(bgr_output, "w") as bgr_out:
        bgr_out.write(bgr_header)

    # Process map data for background
    # Apply a background mask if one was given
    if bgr_mask:
        gscript.verbose(
            _("Using map {} as mask for the background landscape...".format(bgr_mask))
        )
        # Apply mask
        gscript.run_command("r.mask", raster=bgr_mask, overwrite=True, quiet=True)

    # Export data using r.stats
    gscript.verbose(_("Producing output for background landscape"))
    stats = gscript.pipe_command(
        "r.stats",
        flags="1gN",
        input=",".join(parameters),
        separator=",",
        null_value=null_value,
    )

    with open(bgr_output, "a") as bgr_out:
        for row in stats.stdout:
            bgr_out.write("bgr,{}".format(gscript.decode(row)))

    cleanup()
def read_data(self, infile):
    values = []
    for row in range(self.wind["rows"]):
        line = grass.decode(infile.readline()).rstrip("\r\n")
        values.append(line.split())
    return values
def main():
    vinput = options["input"]
    columns = options["columns"].split(",")
    binary = options["developed_column"]
    level = options["subregions_column"]
    random = options["random_column"]
    sep = gutils.separator(options["separator"])
    minim = int(options["min_variables"])
    dredge = flags["d"]
    nprocs = int(options["nprocs"])
    fixed_columns = (
        options["fixed_columns"].split(",") if options["fixed_columns"] else []
    )

    for each in fixed_columns:
        if each not in columns:
            gscript.fatal(
                _("Fixed predictor {} not among predictors specified "
                  "in option 'columns'").format(each))
    if options["max_variables"]:
        maxv = int(options["max_variables"])
    else:
        maxv = len(columns)
    if dredge and minim > maxv:
        gscript.fatal(
            _("Minimum number of predictor variables is larger than maximum number"))
    if not gscript.find_program("Rscript", "--version"):
        gscript.fatal(
            _("Rscript required for running r.futures.potential, but not found. "
              "Make sure you have R installed and added to the PATH."))

    global TMP_CSV, TMP_RSCRIPT, TMP_POT, TMP_DREDGE
    TMP_CSV = gscript.tempfile(create=False) + ".csv"
    TMP_RSCRIPT = gscript.tempfile()
    include_level = True
    distinct = gscript.read_command(
        "v.db.select",
        flags="c",
        map=vinput,
        columns="distinct {level}".format(level=level),
    ).strip()
    if len(distinct.splitlines()) <= 1:
        include_level = False
        single_level = distinct.splitlines()[0]
    with open(TMP_RSCRIPT, "w") as f:
        f.write(rscript)
    TMP_POT = gscript.tempfile(create=False) + "_potential.csv"
    TMP_DREDGE = gscript.tempfile(create=False) + "_dredge.csv"
    columns += [binary]
    if include_level:
        columns += [level]
    if random:
        columns += [random]
    # filter duplicates
    columns = list(dict.fromkeys(columns))
    where = "{c} IS NOT NULL".format(c=columns[0])
    for c in columns[1:]:
        where += " AND {c} IS NOT NULL".format(c=c)
    gscript.run_command(
        "v.db.select",
        map=vinput,
        columns=columns,
        separator="comma",
        where=where,
        file=TMP_CSV,
    )

    if dredge:
        gscript.info(_("Running automatic model selection ..."))
    else:
        gscript.info(_("Computing model..."))

    cmd = [
        "Rscript", TMP_RSCRIPT,
        "-i", TMP_CSV,
        "-r", binary,
        "-o", TMP_POT,
        "-p", ",".join(columns),
        "-m", str(minim),
        "-x", str(maxv),
        "-d", "TRUE" if dredge else "FALSE",
        "-n", str(nprocs),
    ]
    if include_level:
        cmd += ["-l", level]
    if random:
        cmd += ["-a", random]
    if dredge and fixed_columns:
        cmd += ["-f", ",".join(fixed_columns)]
    if dredge and options["dredge_output"]:
        cmd += ["-e", TMP_DREDGE]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if stderr:
        gscript.warning(gscript.decode(stderr))
    if p.returncode != 0:
        gscript.fatal(_("Running R script failed, check messages above"))

    gscript.info(_("Best model summary:"))
    gscript.info("-------------------------")
    gscript.message(gscript.decode(stdout))

    # note: this would be better with pandas, but adds dependency
    with open(TMP_POT, "r") as fin, open(options["output"], "w") as fout:
        i = 0
        for line in fin.readlines():
            row = line.strip().split("\t")
            row = [each.strip('"') for each in row]
            if i == 0:
                row[0] = "ID"
                if include_level and random:
                    row[2] = row[1]
                row[1] = "Intercept"
            if i == 1 and not include_level:
                row[0] = single_level
            if i >= 1:
                if include_level:
                    # devpressure needs to be after intercept
                    if random:
                        row[2], row[1] = row[1], row[2]
                else:
                    row[0] = single_level
            fout.write(sep.join(row))
            fout.write("\n")
            i += 1
    if options["dredge_output"]:
        with open(TMP_DREDGE, "r") as fin, open(options["dredge_output"], "w") as fout:
            i = 0
            for line in fin.readlines():
                row = line.strip().split(",")
                row = [each.strip('"') for each in row]
                if i == 0:
                    row[0] = "ID"
                    row[1] = "Intercept"
                fout.write(sep.join(row))
                fout.write("\n")
                i += 1
def main():
    global usermask, mapset, tmp_rmaps, tmp_vmaps

    input = options['input']
    output = options['output']
    tension = options['tension']
    smooth = options['smooth']
    method = options['method']
    edge = int(options['edge'])
    segmax = int(options['segmax'])
    npmin = int(options['npmin'])
    lambda_ = float(options['lambda'])
    memory = options['memory']
    quiet = True  # FIXME

    mapset = grass.gisenv()['MAPSET']
    unique = str(os.getpid())  # Shouldn't we use temp name?
    prefix = 'r_fillnulls_%s_' % unique
    # a list of failed holes. Caused by issues with v.surf.rst. Connected with #1813
    failed_list = list()

    # check if input file exists
    if not grass.find_file(input)['file']:
        grass.fatal(_("Raster map <%s> not found") % input)

    # save original region
    reg_org = grass.region()

    # check if a MASK is already present
    # and remove it to not interfere with NULL lookup part
    # as we don't fill MASKed parts!
    if grass.find_file('MASK', mapset=mapset)['file']:
        usermask = "usermask_mask." + unique
        grass.message(_("A user raster mask (MASK) is present. Saving it..."))
        grass.run_command('g.rename', quiet=quiet, raster=('MASK', usermask))

    # check if method is rst to use v.surf.rst
    if method == 'rst':
        # idea: filter all NULLS and grow that area(s) by 3 pixel, then
        # interpolate from these surrounding 3 pixel edge
        filling = prefix + 'filled'

        grass.use_temp_region()
        grass.run_command('g.region', align=input, quiet=quiet)
        region = grass.region()
        ns_res = region['nsres']
        ew_res = region['ewres']

        grass.message(_("Using RST interpolation..."))
        grass.message(_("Locating and isolating NULL areas..."))

        # creating binary (0/1) map
        if usermask:
            grass.message(_("Skipping masked raster parts"))
            grass.mapcalc(
                "$tmp1 = if(isnull(\"$input\") && !($mask == 0 || isnull($mask)),1,null())",
                tmp1=prefix + 'nulls', input=input, mask=usermask)
        else:
            grass.mapcalc("$tmp1 = if(isnull(\"$input\"),1,null())",
                          tmp1=prefix + 'nulls', input=input)
        tmp_rmaps.append(prefix + 'nulls')

        # restoring user's mask, if present
        # to ignore MASKed original values
        if usermask:
            grass.message(_("Restoring user mask (MASK)..."))
            try:
                grass.run_command('g.rename', quiet=quiet,
                                  raster=(usermask, 'MASK'))
            except CalledModuleError:
                grass.warning(_("Failed to restore user MASK!"))
            usermask = None

        # grow identified holes by X pixels
        grass.message(_("Growing NULL areas"))
        tmp_rmaps.append(prefix + 'grown')
        try:
            grass.run_command('r.grow', input=prefix + 'nulls',
                              radius=edge + 0.01, old=1, new=1,
                              out=prefix + 'grown', quiet=quiet)
        except CalledModuleError:
            grass.fatal(_("abandoned. Removing temporary map, restoring "
                          "user mask if needed:"))

        # assign unique IDs to each hole or hole system (holes closer than edge distance)
        grass.message(_("Assigning IDs to NULL areas"))
        tmp_rmaps.append(prefix + 'clumped')
        try:
            grass.run_command('r.clump', input=prefix + 'grown',
                              output=prefix + 'clumped', quiet=quiet)
        except CalledModuleError:
            grass.fatal(_("abandoned. Removing temporary map, restoring "
                          "user mask if needed:"))

        # get a list of unique hole cat's
        grass.mapcalc("$out = if(isnull($inp), null(), $clumped)",
                      out=prefix + 'holes', inp=prefix + 'nulls',
                      clumped=prefix + 'clumped')
        tmp_rmaps.append(prefix + 'holes')

        # use new IDs to identify holes
        try:
            grass.run_command('r.to.vect', flags='v', input=prefix + 'holes',
                              output=prefix + 'holes', type='area',
                              quiet=quiet)
        except:
            grass.fatal(_("abandoned. Removing temporary maps, restoring "
                          "user mask if needed:"))
        tmp_vmaps.append(prefix + 'holes')

        # get a list of unique hole cat's
        cats_file_name = grass.tempfile(False)
        grass.run_command('v.db.select', flags='c', map=prefix + 'holes',
                          columns='cat', file=cats_file_name, quiet=quiet)
        cat_list = list()
        cats_file = open(cats_file_name)
        for line in cats_file:
            cat_list.append(line.rstrip('\n'))
        cats_file.close()
        os.remove(cats_file_name)

        if len(cat_list) < 1:
            grass.fatal(_("Input map has no holes. Check region settings."))

        # GTC Hole is NULL area in a raster map
        grass.message(_("Processing %d map holes") % len(cat_list))
        first = True
        hole_n = 1
        for cat in cat_list:
            holename = prefix + 'hole_' + cat
            # GTC Hole is a NULL area in a raster map
            grass.message(_("Filling hole %s of %s") % (hole_n, len(cat_list)))
            hole_n = hole_n + 1
            # cut out only CAT hole for processing
            try:
                grass.run_command('v.extract', input=prefix + 'holes',
                                  output=holename + '_pol', cats=cat,
                                  quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            tmp_vmaps.append(holename + '_pol')

            # zoom to specific hole with a buffer of two cells around the hole to
            # remove rest of data
            try:
                grass.run_command('g.region', vector=holename + '_pol',
                                  align=input,
                                  w='w-%d' % (edge * 2 * ew_res),
                                  e='e+%d' % (edge * 2 * ew_res),
                                  n='n+%d' % (edge * 2 * ns_res),
                                  s='s-%d' % (edge * 2 * ns_res),
                                  quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))

            # remove temporary map to not overfill disk
            try:
                grass.run_command('g.remove', flags='fb', type='vector',
                                  name=holename + '_pol', quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            tmp_vmaps.remove(holename + '_pol')

            # copy only data around hole
            grass.mapcalc("$out = if($inp == $catn, $inp, null())",
                          out=holename, inp=prefix + 'holes', catn=cat)
            tmp_rmaps.append(holename)

            # If here loop is split into two, next part of loop can be run in parallel
            # (except final result patching)
            # Downside - on large maps such approach causes large disk usage

            # grow hole border to get its edge area
            tmp_rmaps.append(holename + '_grown')
            try:
                grass.run_command('r.grow', input=holename,
                                  radius=edge + 0.01, old=-1,
                                  out=holename + '_grown', quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary map, restoring "
                              "user mask if needed:"))

            # no idea why r.grow old=-1 doesn't replace existing values with NULL
            grass.mapcalc("$out = if($inp == -1, null(), \"$dem\")",
                          out=holename + '_edges', inp=holename + '_grown',
                          dem=input)
            tmp_rmaps.append(holename + '_edges')

            # convert to points for interpolation
            tmp_vmaps.append(holename)
            try:
                grass.run_command('r.to.vect', input=holename + '_edges',
                                  output=holename, type='point', flags='z',
                                  quiet=quiet)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))

            # count number of points to control segmax parameter for interpolation:
            pointsnumber = grass.vector_info_topo(map=holename)['points']
            grass.verbose(_("Interpolating %d points") % pointsnumber)

            if pointsnumber < 2:
                grass.verbose(_("No points to interpolate"))
                failed_list.append(holename)
                continue

            # Avoid v.surf.rst warnings
            if pointsnumber < segmax:
                use_npmin = pointsnumber
                use_segmax = pointsnumber * 2
            else:
                use_npmin = npmin
                use_segmax = segmax

            # launch v.surf.rst
            tmp_rmaps.append(holename + '_dem')
            try:
                grass.run_command('v.surf.rst', quiet=quiet, input=holename,
                                  elev=holename + '_dem', tension=tension,
                                  smooth=smooth, segmax=use_segmax,
                                  npmin=use_npmin)
            except CalledModuleError:
                # GTC Hole is NULL area in a raster map
                grass.fatal(_("Failed to fill hole %s") % cat)

            # v.surf.rst sometimes fails with exit code 0
            # related bug #1813
            if not grass.find_file(holename + '_dem')['file']:
                try:
                    tmp_rmaps.remove(holename)
                    tmp_rmaps.remove(holename + '_grown')
                    tmp_rmaps.remove(holename + '_edges')
                    tmp_rmaps.remove(holename + '_dem')
                    tmp_vmaps.remove(holename)
                except:
                    pass
                grass.warning(
                    _("Filling has failed silently. Leaving temporary maps "
                      "with prefix <%s> for debugging.") % holename)
                failed_list.append(holename)
                continue

            # append hole result to interpolated version later used to patch into original DEM
            if first:
                tmp_rmaps.append(filling)
                grass.run_command('g.region', align=input,
                                  raster=holename + '_dem', quiet=quiet)
                grass.mapcalc("$out = if(isnull($inp), null(), $dem)",
                              out=filling, inp=holename,
                              dem=holename + '_dem')
                first = False
            else:
                tmp_rmaps.append(filling + '_tmp')
                grass.run_command('g.region', align=input,
                                  raster=(filling, holename + '_dem'),
                                  quiet=quiet)
                grass.mapcalc(
                    "$out = if(isnull($inp), if(isnull($fill), null(), $fill), $dem)",
                    out=filling + '_tmp', inp=holename,
                    dem=holename + '_dem', fill=filling)
                try:
                    grass.run_command('g.rename',
                                      raster=(filling + '_tmp', filling),
                                      overwrite=True, quiet=quiet)
                except CalledModuleError:
                    grass.fatal(
                        _("abandoned. Removing temporary maps, restoring user "
                          "mask if needed:"))
                # this map has been removed. No need for later cleanup.
                tmp_rmaps.remove(filling + '_tmp')

            # remove temporary maps to not overfill disk
            try:
                tmp_rmaps.remove(holename)
                tmp_rmaps.remove(holename + '_grown')
                tmp_rmaps.remove(holename + '_edges')
                tmp_rmaps.remove(holename + '_dem')
            except:
                pass
            try:
                grass.run_command('g.remove', quiet=quiet, flags='fb',
                                  type='raster',
                                  name=(holename, holename + '_grown',
                                        holename + '_edges',
                                        holename + '_dem'))
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))
            try:
                tmp_vmaps.remove(holename)
            except:
                pass
            try:
                grass.run_command('g.remove', quiet=quiet, flags='fb',
                                  type='vector', name=holename)
            except CalledModuleError:
                grass.fatal(_("abandoned. Removing temporary maps, restoring "
                              "user mask if needed:"))

    # check if method is different from rst to use r.resamp.bspline
    if method != 'rst':
        grass.message(_("Using %s bspline interpolation") % method)

        # clone current region
        grass.use_temp_region()
        grass.run_command('g.region', align=input)

        reg = grass.region()

        # launch r.resamp.bspline
        tmp_rmaps.append(prefix + 'filled')
        # If there are no NULL cells, the r.resamp.bspline call
        # will end with an error although for our needs it's fine
        # Only problem - this state must be read from stderr
        new_env = dict(os.environ)
        new_env['LC_ALL'] = 'C'
        if usermask:
            try:
                p = grass.core.start_command('r.resamp.bspline', input=input,
                                             mask=usermask,
                                             output=prefix + 'filled',
                                             method=method,
                                             ew_step=3 * reg['ewres'],
                                             ns_step=3 * reg['nsres'],
                                             lambda_=lambda_, memory=memory,
                                             flags='n',
                                             stderr=subprocess.PIPE,
                                             env=new_env)
                stderr = grass.decode(p.communicate()[1])
                if "No NULL cells found" in stderr:
                    grass.run_command('g.copy',
                                      raster='%s,%sfilled' % (input, prefix),
                                      overwrite=True)
                    p.returncode = 0
                    grass.warning(
                        _("Input map <%s> has no holes. Copying to output "
                          "without modification.") % (input, ))
            except CalledModuleError as e:
                grass.fatal(
                    _("Failure during bspline interpolation. Error message: %s")
                    % stderr)
        else:
            try:
                p = grass.core.start_command('r.resamp.bspline', input=input,
                                             output=prefix + 'filled',
                                             method=method,
                                             ew_step=3 * reg['ewres'],
                                             ns_step=3 * reg['nsres'],
                                             lambda_=lambda_, memory=memory,
                                             flags='n',
                                             stderr=subprocess.PIPE,
                                             env=new_env)
                stderr = grass.decode(p.communicate()[1])
                if "No NULL cells found" in stderr:
                    grass.run_command('g.copy',
                                      raster='%s,%sfilled' % (input, prefix),
                                      overwrite=True)
                    p.returncode = 0
                    grass.warning(
                        _("Input map <%s> has no holes. Copying to output "
                          "without modification.") % (input, ))
            except CalledModuleError as e:
                grass.fatal(
                    _("Failure during bspline interpolation. Error message: %s")
                    % stderr)

    # restoring user's mask, if present:
    if usermask:
        grass.message(_("Restoring user mask (MASK)..."))
        try:
            grass.run_command('g.rename', quiet=quiet,
                              raster=(usermask, 'MASK'))
        except CalledModuleError:
            grass.warning(_("Failed to restore user MASK!"))
        usermask = None

    # set region to original extents, align to input
    grass.run_command('g.region', n=reg_org['n'], s=reg_org['s'],
                      e=reg_org['e'], w=reg_org['w'], align=input)

    # patch orig and fill map
    grass.message(_("Patching fill data into NULL areas..."))
    # we can use --o here as g.parser already checks on startup
    grass.run_command('r.patch', input=(input, prefix + 'filled'),
                      output=output, overwrite=True)

    # restore the real region
    grass.del_temp_region()

    grass.message(_("Filled raster map is: %s") % output)

    # write cmd history:
    grass.raster_history(output)

    if len(failed_list) > 0:
        grass.warning(
            _("The following holes were not filled. Temporary maps are left "
              "in place to allow examination of unfilled holes"))
        outlist = failed_list[0]
        for hole in failed_list[1:]:
            outlist += ', ' + hole
        grass.message(outlist)

    grass.message(_("Done."))
def writeAvi(filename, images, duration=0.1, encoding='mpeg4',
             inputOptions='', outputOptions='', bg_task=False):
    """Export movie to an AVI file, which is encoded with the given encoding.

    Hint for Windows users: the 'msmpeg4v2' codec is natively supported on
    Windows.

    Images should be a list consisting of PIL images or numpy arrays. The
    latter should be between 0 and 255 for integer types, and between 0 and 1
    for float types.

    Requires the "ffmpeg" application:
    * Most linux users can install it using their package manager
    * There is a windows installer on the visvis website

    :param str filename: output filename
    :param images:
    :param float duration:
    :param str encoding: the encoding type
    :param inputOptions:
    :param outputOptions:
    :param bool bg_task: if run as a threaded background task, do not raise
                         but return the error message
    :return str: error message
    """
    # Get fps
    try:
        fps = float(1.0 / duration)
    except Exception:
        raise ValueError(_('Invalid duration parameter for writeAvi.'))

    # Determine temp dir and create images
    tempDir = os.path.join(os.path.expanduser('~'), '.tempIms')
    images2ims.writeIms(os.path.join(tempDir, 'im*.png'), images)

    # Determine formatter
    N = len(images)
    formatter = '%04d'
    if N < 10:
        formatter = '%d'
    elif N < 100:
        formatter = '%02d'
    elif N < 1000:
        formatter = '%03d'

    # Compile command to create avi
    command = "ffmpeg -r %i %s " % (int(fps), inputOptions)
    command += "-i im%s.png " % (formatter, )
    command += "-g 1 -vcodec %s %s " % (encoding, outputOptions)
    command += "output.avi"

    # Run ffmpeg
    S = subprocess.Popen(command, shell=True, cwd=tempDir,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # Show what ffmpeg has to say
    outPut = S.stdout.read()

    if S.wait():
        # Clean up
        _cleanDir(tempDir)
        if bg_task:
            return (gscript.decode(outPut) + '\n'
                    + gscript.decode(S.stderr.read()) + '\n'
                    + _('Could not write avi.'))
        else:
            # An error occurred, show
            print(gscript.decode(outPut))
            print(gscript.decode(S.stderr.read()))
            raise RuntimeError(_('Could not write avi.'))
    else:
        try:
            # Copy avi
            shutil.copy(os.path.join(tempDir, 'output.avi'), filename)
        except Exception as err:
            # Clean up
            _cleanDir(tempDir)
            if bg_task:
                return str(err)
            else:
                raise

        # Clean up
        _cleanDir(tempDir)
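# Hedged usage sketch for writeAvi(): 'frames' is assumed to be a list of
# PIL images; with bg_task=True a failure comes back as a string instead of
# raising, and None is returned on success.
error = writeAvi("animation.avi", frames, duration=0.04,
                 encoding="msmpeg4v2", bg_task=True)
if error:
    print(error)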