def write_raster(self, array, output_name, directory='core'):
    """Write raster to ASCII file.

    Sets the GRASS computational region from GridGlobals (when
    initialized), converts the array into a GRASS raster and exports it
    with r.out.gdal as an Arc/Info ASCII grid.

    :param array: numpy array with the raster values
    :param output_name: output filename (GRASS raster map name)
    :param directory: directory where to write output file
    """
    # set output region (use current region when GridGlobals are not set)
    if GridGlobals.r:
        region = Region()
        region.west = GridGlobals.xllcorner
        # FIX: south must come from the y lower-left corner;
        # the original assigned xllcorner here as well
        region.south = GridGlobals.yllcorner
        # TODO: use pygrass API instead
        region.east = region.west + (GridGlobals.c * GridGlobals.dx)
        region.north = region.south + (GridGlobals.r * GridGlobals.dy)
        region.cols = GridGlobals.c
        region.rows = GridGlobals.r
        region.write()

    # TBD: extend pygrass to export array directly to specified
    # external format
    numpy2raster(array, "FCELL", output_name, overwrite=True)

    file_output = self._raster_output_path(output_name, directory)

    Module('r.out.gdal',
           input=output_name,
           output=file_output,
           format='AAIGrid',
           nodata=GridGlobals.NoDataValue,
           overwrite=True)

    self._print_array_stats(array, file_output)
def df2raster(df, newrastname, mtype='CELL'):
    """Write a pandas dataframe to a GRASS raster.

    :param df: pandas DataFrame holding the cell values
    :param newrastname: name of the GRASS raster map to create
    :param mtype: GRASS map type (default 'CELL')
    :return: 0 on completion
    """
    from grass.pygrass.raster import numpy2raster

    cell_values = df.values
    numpy2raster(cell_values, mtype, newrastname, overwrite=True)
    return 0
def _predict_multi(self, estimator, region, indexes, class_labels, height,
                   func, output, overwrite):
    """Predict multiple outputs and write one raster per class label.

    :param estimator: fitted estimator used for prediction
    :param region: computational region (provides .cols for row buffers)
    :param indexes: array indexes of the outputs to write
    :param class_labels: labels used to suffix the output raster names
    :param height: number of raster rows read per chunk; None reads the
        whole raster into memory at once
    :param func: prediction function applied to each array chunk
    :param output: base name for the output rasters
    :param overwrite: allow overwriting existing rasters
    :return: RasterStack of the written rasters
    """
    # one output raster per class label: <output>_<label>
    rasternames = [output + "_" + str(label) for label in class_labels]

    # create and open rasters for writing if incremental reading
    if height is not None:
        dst = []
        for i, name in enumerate(rasternames):
            dst.append(RasterRow(name))
            dst[i].open("w", mtype="FCELL", overwrite=overwrite)

        # create data reader generator
        n_windows = len([i for i in self.row_windows(height=height)])

        data_gen = ((wi, self.read(rows=rows))
                    for wi, rows in enumerate(self.row_windows(height=height)))

    # perform prediction
    try:
        if height is not None:
            for wi, arr in data_gen:
                gs.percent(wi, n_windows, 1)
                result = func(arr, estimator)
                result = np.ma.filled(result, np.nan)

                # write multiple features to GRASS GIS rasters
                for i, arr_index in enumerate(indexes):
                    for row in range(result.shape[1]):
                        newrow = Buffer((region.cols, ), mtype="FCELL")
                        newrow[:] = result[arr_index, row, :]
                        dst[i].put_row(newrow)
        else:
            arr = self.read()
            result = func(arr, estimator)
            result = np.ma.filled(result, np.nan)

            for i, arr_index in enumerate(indexes):
                numpy2raster(
                    result[arr_index, :, :],
                    mtype="FCELL",
                    # FIX: the original used rastername[i], which indexed
                    # a single name string (or raised NameError); use the
                    # precomputed per-label raster name instead
                    rastname=rasternames[i],
                    overwrite=overwrite,
                )
    # FIX: bare "except:" also caught SystemExit/KeyboardInterrupt
    except Exception:
        gs.fatal("Error in raster prediction")
    finally:
        if height is not None:
            for i in dst:
                i.close()

    # FIX: the original returned names taken from dst, which is undefined
    # when height is None; the precomputed name list covers both branches
    return RasterStack(rasternames)
def main():
    """Extract spectral endmembers from an imagery group and optionally
    unmix the bands into abundance maps.

    Parses module options, validates them, stacks the imagery group into
    a numpy cube, extracts endmembers with pysptools (NFINDR/PPI/FIPPI)
    and writes the requested outputs: a spectra text file for the
    i.spec.unmix addon, a vector map of endmember locations, and/or
    per-endmember abundance rasters.
    """
    try:
        import pysptools.eea as eea
    except ImportError:
        gs.fatal(_("Cannot import pysptools \
(https://pypi.python.org/pypi/pysptools) library."
                   " Please install it (pip install pysptools)"
                   " or ensure that it is on path"
                   " (use PYTHONPATH variable)."))

    try:
        # sklearn is a dependency of used pysptools functionality
        import sklearn
    except ImportError:
        gs.fatal(_("Cannot import sklearn \
(https://pypi.python.org/pypi/scikit-learn) library."
                   " Please install it (pip install scikit-learn)"
                   " or ensure that it is on path"
                   " (use PYTHONPATH variable)."))

    try:
        # only imported to verify availability; used internally by pysptools
        from cvxopt import solvers, matrix
    except ImportError:
        gs.fatal(_("Cannot import cvxopt \
(https://pypi.python.org/pypi/cvxopt) library."
                   " Please install it (pip install cvxopt)"
                   " or ensure that it is on path"
                   " (use PYTHONPATH variable)."))

    # Parse input options
    group = options['input']
    output = options['output']
    prefix = options['prefix']
    endmember_n = int(options['endmember_n'])
    endmembers = options['endmembers']
    # FIX: convert maxit to int; the original kept the raw option string,
    # which made the later "maxit <= 0" comparison raise a TypeError
    if options['maxit']:
        maxit = int(options['maxit'])
    else:
        maxit = 0
    extraction_method = options['extraction_method']
    unmixing_method = options['unmixing_method']
    atgp_init = not flags['n']

    # List maps in imagery group
    try:
        maps = gs.read_command('i.group', flags='g', group=group,
                               quiet=True).rstrip('\n').split('\n')
    except Exception:
        # FIX: the original silently passed here, leaving 'maps' undefined
        # and causing a NameError during validation below
        gs.fatal('Could not read content of imagery group <{}>'.format(group))

    # Validate input
    # q and maxit can be None according to manual, but does not work in
    # current pysptools version
    if endmember_n <= 0:
        gs.fatal('Number of endmembers has to be > 0')

    if maxit <= 0:
        maxit = 3 * len(maps)

    if endmember_n > len(maps) + 1:
        gs.warning('More endmembers ({}) requested than bands in \
input imagery group ({})'.format(endmember_n, len(maps)))
        if extraction_method != 'PPI':
            gs.fatal('Only PPI method can extract more endmembers than number \
of bands in the imagery group')

    if not atgp_init and extraction_method != 'NFINDR':
        gs.verbose('ATGP is only taken into account in \
NFINDR extraction method...')

    # Get metainformation from input bands
    band_types = {}
    img = None
    n = 0
    gs.verbose('Reading imagery group...')
    for m in maps:
        map = m.split('@')

        # Build numpy stack from imagery group
        raster = r.raster2numpy(map[0], mapset=map[1])
        # FIX: compare the array's dtype (the original compared the array
        # itself against np.float64, which is never true), call np.float32
        # (bare float32 was a NameError) and report the map name rather
        # than formatting the whole array into the warning
        if raster.dtype == np.float64:
            raster = np.float32(raster)
            gs.warning('{} is of type Float64.\
Float64 is currently not supported.\
Reducing precision to Float32'.format(map[0]))

        # Determine map type
        band_types[map[0]] = get_rastertype(raster)

        # Create cube and mask from GRASS internal NoData value
        if n == 0:
            img = raster
            # Create mask from GRASS internal NoData value
            mask = mask_rasternd(raster)
        else:
            img = np.dstack((img, raster))
            mask = np.logical_and(mask_rasternd(raster), mask)
        n = n + 1

    # Read a MASK if present and give warning if not
    # Note that otherwise NoData is read as values
    gs.verbose('Checking for MASK...')
    try:
        MASK = r.raster2numpy('MASK', mapset=getenv('MAPSET')) == 1
        mask = np.logical_and(MASK, mask)
        MASK = None
    except Exception:
        # no MASK present; keep the NoData-derived mask only
        pass

    if extraction_method == 'NFINDR':
        # Extract endmembers from valid pixels using NFINDR function from pysptools
        gs.verbose('Extracting endmembers using NFINDR...')
        nfindr = eea.NFINDR()
        E = nfindr.extract(img, endmember_n, maxit=maxit, normalize=False,
                           ATGP_init=atgp_init, mask=mask)
    elif extraction_method == 'PPI':
        # Extract endmembers from valid pixels using PPI function from pysptools
        gs.verbose('Extracting endmembers using PPI...')
        ppi = eea.PPI()
        E = ppi.extract(img, endmember_n, numSkewers=10000, normalize=False,
                        mask=mask)
    elif extraction_method == 'FIPPI':
        # Extract endmembers from valid pixels using FIPPI function from pysptools
        gs.verbose('Extracting endmembers using FIPPI...')
        fippi = eea.FIPPI()
        # q and maxit can be None according to manual, but does not work
        # in the current pysptools version, so both are always passed
        E = fippi.extract(img, q=endmember_n, maxit=maxit, normalize=False,
                          mask=mask)

    # Write output file in format required for i.spec.unmix addon
    if output:
        gs.verbose('Writing spectra file...')
        n = 0
        with open(output, 'w') as o:
            o.write('# Channels: {}\n'.format('\t'.join(band_types.keys())))
            o.write('# Wrote {} spectra line wise.\n#\n'.format(endmember_n))
            o.write('Matrix: {0} by {1}\n'.format(endmember_n, len(maps)))
            for e in E:
                o.write('row{0}: {1}\n'.format(n, '\t'.join([str(i) for i in e])))
                n = n + 1

    # Write vector map with endmember information if requested
    if endmembers:
        gs.verbose('Writing vector map with endmembers...')
        from grass.pygrass import utils as u
        from grass.pygrass.gis.region import Region
        from grass.pygrass.vector import Vector
        from grass.pygrass.vector import VectorTopo
        from grass.pygrass.vector.geometry import Point

        # Define columns for attribute table (one column per band)
        cols = [(u'cat', 'INTEGER PRIMARY KEY')]
        for b in band_types.keys():
            cols.append((b.replace('.', '_'), band_types[b]))

        # Get region information
        reg = Region()

        # Create vector map
        new = Vector(endmembers)
        new.open('w', tab_name=endmembers, tab_cols=cols)

        cat = 1
        for e in E:
            # Get indices; numpy array is ordered rows, columns (y, x)
            idx = np.where((img[:, :] == e).all(-1))
            if len(idx[0]) == 0 or len(idx[1]) == 0:
                # FIX: typo "coordinated" -> "coordinates"
                gs.warning('Could not compute coordinates for endmember {}. \
Please consider rescaling your data to integer'.format(cat))
                cat = cat + 1
                continue
            coords = u.pixel2coor((idx[1][0], idx[0][0]), reg)
            point = Point(coords[1] + reg.ewres / 2.0,
                          coords[0] - reg.nsres / 2.0)

            # Get attributes
            n = 0
            attr = []
            for b in band_types.keys():
                if band_types[b] == u'INTEGER':
                    attr.append(int(e[n]))
                else:
                    attr.append(float(e[n]))
                n = n + 1

            # Write geometry with attributes
            new.write(point, cat=cat, attrs=tuple(attr))
            cat = cat + 1

        # Close vector map
        new.table.conn.commit()
        new.close(build=True)

    if prefix:
        # Run spectral unmixing
        import pysptools.abundance_maps as amaps
        if unmixing_method == 'FCLS':
            fcls = amaps.FCLS()
            result = fcls.map(img, E, normalize=False, mask=mask)
        elif unmixing_method == 'NNLS':
            nnls = amaps.NNLS()
            result = nnls.map(img, E, normalize=False, mask=mask)
        elif unmixing_method == 'UCLS':
            ucls = amaps.UCLS()
            result = ucls.map(img, E, normalize=False, mask=mask)

        # Write results, one abundance raster per endmember
        for l in range(endmember_n):
            rastname = '{0}_{1}'.format(prefix, l + 1)
            r.numpy2raster(result[:, :, l], 'FCELL', rastname)
def test_write(self):
    """Round-trip check: write a random array as FCELL and validate it."""
    values = random([40, 60])
    numpy2raster(values, 'FCELL', self.name, True)
    self.assertTrue(check_raster(self.name))
def predict(self, estimator, output, height=None, overwrite=False):
    """Prediction method for RasterStack class

    Parameters
    ----------
    estimator : estimator object implementing 'fit'
        The object to use to fit the data.

    output : str
        Output name for prediction raster.

    height : int (opt).
        Number of raster rows to pass to estimator at one time. If not
        specified then the entire raster is read into memory.

    overwrite : bool (opt). Default is False
        Option to overwrite an existing raster.

    Returns
    -------
    RasterStack
    """
    reg = Region()
    func = self._pred_fun

    # determine dtype by predicting on a single-row window
    test_window = list(self.row_windows(height=1))[0]
    img = self.read(rows=test_window)
    result = func(img, estimator)

    try:
        # np.finfo succeeds only for floating dtypes
        np.finfo(result.dtype)
        mtype = "FCELL"
        nodata = np.nan
    # FIX: bare "except:" replaced with the exceptions np.finfo raises
    # for non-float dtypes
    except (TypeError, ValueError):
        mtype = "CELL"
        nodata = -2147483648

    # determine whether multi-target
    if result.shape[0] > 1:
        n_outputs = result.shape[result.ndim - 1]
    else:
        n_outputs = 1

    indexes = np.arange(0, n_outputs)

    # choose prediction function
    if len(indexes) == 1:
        func = self._pred_fun
    else:
        func = self._predfun_multioutput

    if len(indexes) > 1:
        result_stack = self._predict_multi(estimator, reg, indexes, indexes,
                                           height, func, output, overwrite)
    else:
        if height is not None:
            with RasterRow(output, mode="w", mtype=mtype,
                           overwrite=overwrite) as dst:
                n_windows = len(
                    [i for i in self.row_windows(height=height)])

                data_gen = ((wi, self.read(rows=rows))
                            for wi, rows in enumerate(
                                self.row_windows(height=height)))

                for wi, arr in data_gen:
                    gs.percent(wi, n_windows, 1)
                    result = func(arr, estimator)
                    result = np.ma.filled(result, nodata)

                    # writing data to GRASS raster row-by-row
                    for i in range(result.shape[1]):
                        newrow = Buffer((reg.cols, ), mtype=mtype)
                        newrow[:] = result[0, i, :]
                        dst.put_row(newrow)
        else:
            arr = self.read()
            result = func(arr, estimator)
            result = np.ma.filled(result, nodata)
            numpy2raster(result[0, :, :], mtype=mtype, rastname=output,
                         overwrite=overwrite)

        result_stack = RasterStack(output)

    return result_stack
def main():
    """Do the main work.

    Computes a weighted, parametrised cumulative viewshed (visual
    exposure index) from a DSM: validates inputs, samples the exposure
    source into target points, precomputes any parameter maps required
    by the chosen function, runs partial viewsheds in parallel and
    writes the summed result to the output raster.
    """
    # set numpy printing options
    np.set_printoptions(formatter={"float": lambda x: "{0:0.2f}".format(x)})

    # ==========================================================================
    # Input data
    # ==========================================================================
    # Required
    r_output = options["output"]
    r_dsm = options["input"]
    dsm_type = grass.parse_command("r.info", map=r_dsm, flags="g")["datatype"]

    # Test if DSM exist
    gfile_dsm = grass.find_file(name=r_dsm, element="cell")
    if not gfile_dsm["file"]:
        grass.fatal("Raster map <{}> not found".format(r_dsm))

    # Exposure settings
    v_source = options["sampling_points"]
    r_source = options["source"]
    source_cat = options["sourcecat"]
    r_weights = options["weights"]

    # test if source vector map exist and contains points
    if v_source:
        gfile_vsource = grass.find_file(name=v_source, element="vector")
        if not gfile_vsource["file"]:
            grass.fatal("Vector map <{}> not found".format(v_source))
        if not grass.vector.vector_info_topo(v_source, layer=1)["points"] > 0:
            grass.fatal("Vector map <{}> does not contain any points.".format(
                v_source))

    if r_source:
        gfile_rsource = grass.find_file(name=r_source, element="cell")
        if not gfile_rsource["file"]:
            grass.fatal("Raster map <{}> not found".format(r_source))

        # if source_cat is set, check that r_source is CELL
        source_datatype = grass.parse_command("r.info",
                                              map=r_source,
                                              flags="g")["datatype"]
        if source_cat != "*" and source_datatype != "CELL":
            grass.fatal(
                "The raster map <%s> must be integer (CELL type) in order to \
use the 'sourcecat' parameter" % r_source)

    if r_weights:
        gfile_weights = grass.find_file(name=r_weights, element="cell")
        if not gfile_weights["file"]:
            grass.fatal("Raster map <{}> not found".format(r_weights))

    # Viewshed settings
    range_inp = float(options["range"])
    v_elevation = float(options["observer_elevation"])
    b_1 = float(options["b1_distance"])
    pfunction = options["function"]
    refr_coeff = float(options["refraction_coeff"])

    # r.viewshed flag string forwarded to the worker processes
    flagstring = ""
    if flags["r"]:
        flagstring += "r"
    if flags["c"]:
        flagstring += "c"

    # test values; range -1 is the sentinel for "infinite range"
    if v_elevation < 0.0:
        grass.fatal("Observer elevation must be larger than or equal to 0.0.")
    if range_inp <= 0.0 and range_inp != -1:
        grass.fatal("Exposure range must be larger than 0.0.")
    if pfunction == "Fuzzy_viewshed" and range_inp == -1:
        grass.fatal("Exposure range cannot be \
infinity for fuzzy viewshed approch.")
    if pfunction == "Fuzzy_viewshed" and b_1 > range_inp:
        grass.fatal("Exposure range must be larger than radius around \
the viewpoint where clarity is perfect.")

    # Sampling settings
    source_sample_density = float(options["sample_density"])
    seed = options["seed"]
    if not seed:
        # if seed is not set, set it to process number
        seed = os.getpid()

    # Optional
    cores = int(options["nprocs"])
    memory = int(options["memory"])

    # ==========================================================================
    # Region settings
    # ==========================================================================
    # check that location is not in lat/long
    if grass.locn_is_latlong():
        grass.fatal("The analysis is not available for lat/long coordinates.")

    # get comp. region parameters
    reg = Region()

    # check that NSRES equals EWRES
    if abs(reg.ewres - reg.nsres) > 1e-6:
        grass.fatal("Variable north-south and east-west 2D grid resolution \
is not supported")

    # adjust exposure range as a multiplicate of region resolution
    # if infinite, set exposure range to the max of region size
    if range_inp != -1:
        multiplicate = math.floor(range_inp / reg.nsres)
        exp_range = multiplicate * reg.nsres
    else:
        range_inf = max(reg.north - reg.south, reg.east - reg.west)
        multiplicate = math.floor(range_inf / reg.nsres)
        exp_range = multiplicate * reg.nsres

    # a user MASK would bias the viewsheds; rename it for the duration
    if RasterRow("MASK", Mapset().name).exist():
        grass.warning("Current MASK is temporarily renamed.")
        unset_mask()

    # ==========================================================================
    # Random sample exposure source with target points T
    # ==========================================================================
    if v_source:
        # go for using input vector map as sampling points
        v_source_sample = v_source
        grass.verbose("Using sampling points from input vector map")
    else:
        # go for sampling
        # min. distance between samples set to half of region resolution
        # (issue in r.random.cells)
        sample_distance = reg.nsres / 2
        v_source_sample = sample_raster_with_points(
            r_source,
            source_cat,
            source_sample_density,
            sample_distance,
            "{}_rand_pts_vect".format(TEMPNAME),
            seed,
        )

    # ==========================================================================
    # Get coordinates and attributes of target points T
    # ==========================================================================
    # Prepare a list of maps to extract attributes from
    # DSM values
    attr_map_list = [r_dsm]

    if pfunction in ["Solid_angle", "Visual_magnitude"]:
        grass.verbose("Precomputing parameter maps...")

    # Precompute values A, B, C, D for solid angle function
    # using moving window [row, col]
    if pfunction == "Solid_angle":
        r_a_z = "{}_A_z".format(TEMPNAME)
        r_b_z = "{}_B_z".format(TEMPNAME)
        r_c_z = "{}_C_z".format(TEMPNAME)
        r_d_z = "{}_D_z".format(TEMPNAME)

        # each output is the mean elevation of one cell-corner
        # neighbourhood; $-placeholders are substituted by grass.mapcalc
        expr = ";".join([
            "$outmap_A = ($inmap[0, 0] + \
                          $inmap[0, -1] + \
                          $inmap[1, -1] + \
                          $inmap[1, 0]) / 4",
            "$outmap_B = ($inmap[-1, 0] + \
                          $inmap[-1, -1] + \
                          $inmap[0, -1] + \
                          $inmap[0, 0]) / 4",
            "$outmap_C = ($inmap[-1, 1] + \
                          $inmap[-1, 0] + \
                          $inmap[0, 0] + \
                          $inmap[0, 1]) / 4",
            "$outmap_D = ($inmap[0, 1] + \
                          $inmap[0, 0] + \
                          $inmap[1, 0] + \
                          $inmap[1, 1]) / 4",
        ])
        grass.mapcalc(
            expr,
            inmap=r_dsm,
            outmap_A=r_a_z,
            outmap_B=r_b_z,
            outmap_C=r_c_z,
            outmap_D=r_d_z,
            overwrite=True,
            quiet=grass.verbosity() <= 1,
        )
        attr_map_list.extend([r_a_z, r_b_z, r_c_z, r_d_z])

    # Precompute values slopes in e-w direction, n-s direction
    # as atan(dz/dx) (e-w direction), atan(dz/dy) (n-s direction)
    # using moving window [row, col]
    elif pfunction == "Visual_magnitude":
        r_slope_ew = "{}_slope_ew".format(TEMPNAME)
        r_slope_ns = "{}_slope_ns".format(TEMPNAME)

        # Sobel-style weighted differences of the 8-neighbourhood
        expr = ";".join([
            "$outmap_ew = atan((sqrt(2) * $inmap[-1, 1] + \
                            2 * $inmap[0, 1] + \
                            sqrt(2) * $inmap[1, 1] - \
                            sqrt(2) * $inmap[-1, -1] - \
                            2 * $inmap[0, -1] - \
                            sqrt(2) * $inmap[1, -1]) / \
                            (8 * $w_ew))",
            "$outmap_ns = atan((sqrt(2) * $inmap[-1, -1] + \
                            2 * $inmap[-1, 0] + \
                            sqrt(2) * $inmap[-1, 1] - \
                            sqrt(2) * $inmap[1, -1] - \
                            2 * $inmap[1, 0] - \
                            sqrt(2) * $inmap[1, 1]) / \
                            (8 * $w_ns))",
        ])
        grass.mapcalc(
            expr,
            inmap=r_dsm,
            outmap_ew=r_slope_ew,
            outmap_ns=r_slope_ns,
            w_ew=reg.ewres,
            w_ns=reg.nsres,
            overwrite=True,
            quiet=grass.verbosity() <= 1,
        )
        attr_map_list.extend([r_slope_ew, r_slope_ns])

    # Use viewshed weights if provided
    if r_weights:
        attr_map_list.append(r_weights)

    # Extract attribute values
    target_pts_grass = grass.read_command(
        "r.what",
        flags="v",
        map=attr_map_list,
        points=v_source_sample,
        separator="|",
        null_value="*",
        quiet=True,
    )

    # columns to use depending on parametrization function
    usecols = list(range(0, 4 + len(attr_map_list)))
    usecols.remove(3)  # skip 3rd column - site_name

    # convert coordinates and attributes of target points T to numpy array
    target_pts_np = txt2numpy(
        target_pts_grass,
        sep="|",
        names=None,
        null_value="*",
        usecols=usecols,
        structured=False,
    )

    # if one point only - 0D array which cannot be used in iteration
    if target_pts_np.ndim == 1:
        target_pts_np = target_pts_np.reshape(1, -1)

    # drop points with any NaN attribute (e.g. outside the DSM)
    target_pts_np = target_pts_np[~np.isnan(target_pts_np).any(axis=1)]

    no_points = target_pts_np.shape[0]

    # if viewshed weights not set by flag - set weight to 1 for all pts
    if not r_weights:
        weights_np = np.ones((no_points, 1))
        target_pts_np = np.hstack((target_pts_np, weights_np))

    grass.debug("target_pts_np: {}".format(target_pts_np))

    # ==========================================================================
    # Calculate weighted parametrised cummulative viewshed
    # by iterating over target points T
    # ==========================================================================
    grass.verbose("Calculating partial viewsheds...")

    # Parametrisation function
    if pfunction == "Solid_angle":
        parametrise_viewshed = solid_angle_reverse
    elif pfunction == "Distance_decay":
        parametrise_viewshed = distance_decay_reverse
    elif pfunction == "Fuzzy_viewshed":
        parametrise_viewshed = fuzzy_viewshed_reverse
    elif pfunction == "Visual_magnitude":
        parametrise_viewshed = visual_magnitude_reverse
    else:
        parametrise_viewshed = binary

    # Collect variables that will be used in do_it_all() into a dictionary
    global_vars = {
        "region": reg,
        "range": exp_range,
        "param_viewshed": parametrise_viewshed,
        "observer_elevation": v_elevation,
        "b_1": b_1,
        "memory": memory,
        "refr_coeff": refr_coeff,
        "flagstring": flagstring,
        "r_dsm": r_dsm,
        "dsm_type": dsm_type,
        "cores": cores,
        "tempname": TEMPNAME,
    }

    # Split target points to chunks for each core
    target_pnts = np.array_split(target_pts_np, cores)

    # Combine each chunk with dictionary
    combo = list(zip(itertools.repeat(global_vars), target_pnts))

    # Calculate partial cummulative viewshed, one chunk per worker
    with Pool(cores) as pool:
        np_sum = pool.starmap(do_it_all, combo)
        pool.close()
        pool.join()

    # We should probably use nansum here?
    # sum the partial results; cells that are NaN in every chunk stay NaN
    all_nan = np.all(np.isnan(np_sum), axis=0)
    np_sum = np.nansum(np_sum, axis=0, dtype=np.single)
    np_sum[all_nan] = np.nan

    grass.verbose("Writing final result and cleaning up...")

    # Restore original computational region
    reg.read()
    reg.set_current()
    reg.set_raster_region()

    # Convert numpy array of cummulative viewshed to raster
    numpy2raster(np_sum, mtype="FCELL", rastname=r_output, overwrite=True)

    # Remove temporary files and reset mask if needed
    cleanup()

    # Set raster history to output raster
    grass.raster_history(r_output, overwrite=True)
    grass.run_command(
        "r.support",
        overwrite=True,
        map=r_output,
        title="Visual exposure index as {}".format(pfunction.replace("_", " ")),
        description="generated by r.viewshed.exposure",
        units="Index value",
        quiet=True,
    )