def _check(value, path, type):
    """Private function to check the correctness of a value.

    :param value: name of the directory
    :type value: str
    :param path: path where the directory is located
    :type path: path
    :param type: a string defining the type that will be checked,
                 valid types are: GISBASE, GISDBASE, LOCATION_NAME, MAPSET
    :type type: str
    :return: the value if it passes the check; if value is empty, the
             corresponding environment variable is returned
    :rtype: str
    """
    if value and CHECK_IS[type](join(path, value)):
        return value
    elif value == '':
        from grass.pygrass.utils import getenv
        return getenv(type)
    else:
        raise GrassError("%s <%s> not found" % (type.title(),
                                                join(path, value)))
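# Usage sketch for _check(), assuming it lives in grass.pygrass.gis as in
# current pygrass and that an active GRASS session exists; the location path
# and mapset name below are placeholders, not taken from the source.
from grass.pygrass.gis import _check

location_path = "/grassdata/nc_basic"                    # hypothetical location directory
existing = _check("PERMANENT", location_path, "MAPSET")  # -> "PERMANENT" if the mapset exists
fallback = _check("", location_path, "MAPSET")           # empty value -> MAPSET from the environment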
def __init__(self, start_time, end_time, dtype, mkeys):
    assert isinstance(start_time, datetime), \
        "start_time not a datetime object!"
    assert isinstance(end_time, datetime), \
        "end_time not a datetime object!"
    assert start_time <= end_time, "start_time > end_time!"
    self.start_time = start_time
    self.end_time = end_time
    self.dtype = dtype

    self.region = Region()
    self.xr = self.region.cols
    self.yr = self.region.rows
    # Check if region is at least 3x3
    if self.xr < 3 or self.yr < 3:
        msgr.fatal(u"GRASS Region should be at least 3 cells by 3 cells")
    self.dx = self.region.ewres
    self.dy = self.region.nsres
    self.reg_bbox = {'e': self.region.east, 'w': self.region.west,
                     'n': self.region.north, 's': self.region.south}
    self.overwrite = gscript.overwrite()
    self.mapset = gutils.getenv('MAPSET')
    self.maps = dict.fromkeys(mkeys)
    # init temporal module
    tgis.init()
    assert os.path.isfile(self.rules_h)
    assert os.path.isfile(self.rules_v)
    assert os.path.isfile(self.rules_def)
def format_id(name): """Take a map or stds name as input and return a fully qualified name, i.e. including mapset """ if '@' in name: return name else: return '@'.join((name, gutils.getenv('MAPSET')))
def format_id(name):
    '''Take a map or stds name as input
    and return a fully qualified name, i.e. including mapset
    '''
    mapset = gutils.getenv('MAPSET')
    if '@' in name:
        return name
    else:
        return '@'.join((name, mapset))
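# Usage sketch for format_id(), assuming an active GRASS session whose current
# mapset is e.g. PERMANENT and that gutils refers to grass.pygrass.utils.
format_id('elevation')            # -> 'elevation@PERMANENT'
format_id('elevation@PERMANENT')  # already fully qualified, returned unchanged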
def __init__(self, start_time, end_time, dtype, mkeys,
             region_id, raster_mask_id):
    assert isinstance(start_time, datetime), \
        "start_time not a datetime object!"
    assert isinstance(end_time, datetime), \
        "end_time not a datetime object!"
    assert start_time <= end_time, "start_time > end_time!"
    self.region_id = region_id
    self.raster_mask_id = raster_mask_id
    self.start_time = start_time
    self.end_time = end_time
    self.dtype = dtype
    self.old_mask_name = None

    # LatLon is not supported
    if gscript.locn_is_latlong():
        msgr.fatal(u"latlong location is not supported. "
                   u"Please use a projected location")
    # Set region
    if self.region_id:
        gscript.use_temp_region()
        gscript.run_command("g.region", region=region_id)
    self.region = Region()
    self.xr = self.region.cols
    self.yr = self.region.rows
    # Check if region is at least 3x3
    if self.xr < 3 or self.yr < 3:
        msgr.fatal(u"GRASS Region should be at least 3 cells by 3 cells")
    self.dx = self.region.ewres
    self.dy = self.region.nsres
    self.reg_bbox = {'e': self.region.east, 'w': self.region.west,
                     'n': self.region.north, 's': self.region.south}
    # Set temporary mask
    if self.raster_mask_id:
        self.set_temp_mask()
    self.overwrite = gscript.overwrite()
    self.mapset = gutils.getenv('MAPSET')
    self.maps = dict.fromkeys(mkeys)
    # init temporal module
    tgis.init()
    # Create thread and queue for writing raster maps
    self.raster_lock = Lock()
    self.raster_writer_queue = Queue(maxsize=15)
    worker_args = (self.raster_writer_queue, self.raster_lock)
    self.raster_writer_thread = Thread(name="RasterWriter",
                                       target=raster_writer,
                                       args=worker_args)
    self.raster_writer_thread.start()
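# Instantiation sketch for the two constructors above. The surrounding class
# name is not shown in this excerpt, so "Igis" is a hypothetical placeholder,
# as are the dtype, map keys, saved region name and raster mask name.
from datetime import datetime, timedelta

start = datetime(2017, 1, 1)
end = start + timedelta(hours=6)
gis = Igis(start_time=start, end_time=end,
           dtype='float32',                      # placeholder data type
           mkeys=['dem', 'friction', 'rain'],    # placeholder map keys
           region_id='study_area',               # placeholder saved region
           raster_mask_id='domain_mask')         # placeholder raster mask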
def switch_through_locations(queue):
    """Switches through a list of locations"""
    # Just to be sure we don't influence other tests.
    # pylint: disable=import-outside-toplevel
    import grass.pygrass.utils as pygrass_utils
    import grass.lib.gis as libgis

    names = []
    for location_name in ["test1", "test2", "abc"]:
        # pylint: disable=protected-access
        gs.core._create_location_xy(tmp_path, location_name)
        with grass_setup.init(tmp_path / location_name):
            libgis.G__read_gisrc_path()
            libgis.G__read_gisrc_env()
            names.append(
                (pygrass_utils.getenv("LOCATION_NAME"), location_name))
    queue.put(names)
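# Usage sketch: the helper above is written to run in a child process and
# report its results through a queue. It assumes the surrounding test module
# provides gs (grass.script), grass_setup (grass.script.setup) and tmp_path
# (a pytest fixture), as the function body suggests.
import multiprocessing

queue = multiprocessing.Queue()
proc = multiprocessing.Process(target=switch_through_locations, args=(queue,))
proc.start()
proc.join()
for reported_name, expected_name in queue.get():
    assert reported_name == expected_name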
def main():
    try:
        import pysptools.eea as eea
    except ImportError:
        gs.fatal(_("Cannot import pysptools "
                   "(https://pypi.python.org/pypi/pysptools) library."
                   " Please install it (pip install pysptools)"
                   " or ensure that it is on path"
                   " (use PYTHONPATH variable)."))

    try:
        # sklearn is a dependency of used pysptools functionality
        import sklearn
    except ImportError:
        gs.fatal(_("Cannot import sklearn "
                   "(https://pypi.python.org/pypi/scikit-learn) library."
                   " Please install it (pip install scikit-learn)"
                   " or ensure that it is on path"
                   " (use PYTHONPATH variable)."))

    try:
        from cvxopt import solvers, matrix
    except ImportError:
        gs.fatal(_("Cannot import cvxopt "
                   "(https://pypi.python.org/pypi/cvxopt) library."
                   " Please install it (pip install cvxopt)"
                   " or ensure that it is on path"
                   " (use PYTHONPATH variable)."))

    # Parse input options
    input = options['input']
    output = options['output']
    prefix = options['prefix']
    endmember_n = int(options['endmember_n'])
    endmembers = options['endmembers']
    if options['maxit']:
        maxit = int(options['maxit'])
    else:
        maxit = 0
    extraction_method = options['extraction_method']
    unmixing_method = options['unmixing_method']
    atgp_init = True if not flags['n'] else False

    # List maps in imagery group
    try:
        maps = gs.read_command('i.group', flags='g', group=input,
                               quiet=True).rstrip('\n').split('\n')
    except:
        pass

    # Validate input
    # q and maxit can be None according to manual, but does not work in
    # current pysptools version
    if endmember_n <= 0:
        gs.fatal('Number of endmembers has to be > 0')

    """if (extraction_method == 'PPI' or
            extraction_method == 'NFINDR'):
        gs.fatal('Extraction methods PPI and NFINDR require endmember_n >= 2')
        endmember_n = None"""

    if maxit <= 0:
        maxit = 3 * len(maps)

    if endmember_n > len(maps) + 1:
        gs.warning('More endmembers ({}) requested than bands in '
                   'input imagery group ({})'.format(endmember_n, len(maps)))
        if extraction_method != 'PPI':
            gs.fatal('Only PPI method can extract more endmembers than the '
                     'number of bands in the imagery group')

    if not atgp_init and extraction_method != 'NFINDR':
        gs.verbose('ATGP is only taken into account in the '
                   'NFINDR extraction method...')

    # Get metainformation from input bands
    band_types = {}

    img = None

    n = 0
    gs.verbose('Reading imagery group...')
    for m in maps:
        map = m.split('@')

        # Build numpy stack from imagery group
        raster = r.raster2numpy(map[0], mapset=map[1])
        if raster.dtype == np.float64:
            raster = raster.astype(np.float32)
            gs.warning('{} is of type Float64. Float64 is currently not '
                       'supported. Reducing precision to '
                       'Float32'.format(map[0]))

        # Determine map type
        band_types[map[0]] = get_rastertype(raster)

        # Create cube and mask from GRASS internal NoData value
        if n == 0:
            img = raster
            # Create mask from GRASS internal NoData value
            mask = mask_rasternd(raster)
        else:
            img = np.dstack((img, raster))
            mask = np.logical_and((mask_rasternd(raster)), mask)

        n = n + 1

    # Read a mask if present and give warning if not
    # Note that otherwise NoData is read as values
    gs.verbose('Checking for MASK...')
    try:
        MASK = r.raster2numpy('MASK', mapset=getenv('MAPSET')) == 1
        mask = np.logical_and(MASK, mask)
        MASK = None
    except:
        pass

    if extraction_method == 'NFINDR':
        # Extract endmembers from valid pixels using NFINDR function from pysptools
        gs.verbose('Extracting endmembers using NFINDR...')
        nfindr = eea.NFINDR()
        E = nfindr.extract(img, endmember_n, maxit=maxit,
                           normalize=False, ATGP_init=atgp_init,
                           mask=mask)
    elif extraction_method == 'PPI':
        # Extract endmembers from valid pixels using PPI function from pysptools
        gs.verbose('Extracting endmembers using PPI...')
        ppi = eea.PPI()
        E = ppi.extract(img, endmember_n, numSkewers=10000,
                        normalize=False, mask=mask)
    elif extraction_method == 'FIPPI':
        # Extract endmembers from valid pixels using FIPPI function from pysptools
        gs.verbose('Extracting endmembers using FIPPI...')
        fippi = eea.FIPPI()
        # q and maxit can be None according to manual, but does not work
        """if not maxit and not endmember_n:
            E = fippi.extract(img, q=None, normalize=False, mask=mask)
        if not maxit:
            E = fippi.extract(img, q=endmember_n, normalize=False, mask=mask)
        if not endmember_n:
            E = fippi.extract(img, q=int(), maxit=maxit, normalize=False,
                              mask=mask)
        else:
            E = fippi.extract(img, q=endmember_n, maxit=maxit,
                              normalize=False, mask=mask)"""
        E = fippi.extract(img, q=endmember_n, maxit=maxit,
                          normalize=False, mask=mask)

    # Write output file in format required for i.spec.unmix addon
    if output:
        gs.verbose('Writing spectra file...')
        n = 0
        with open(output, 'w') as o:
            o.write('# Channels: {}\n'.format('\t'.join(band_types.keys())))
            o.write('# Wrote {} spectra line wise.\n#\n'.format(endmember_n))
            o.write('Matrix: {0} by {1}\n'.format(endmember_n, len(maps)))
            for e in E:
                o.write('row{0}: {1}\n'.format(n,
                                               '\t'.join([str(i) for i in e])))
                n = n + 1

    # Write vector map with endmember information if requested
    if endmembers:
        gs.verbose('Writing vector map with endmembers...')
        from grass.pygrass import utils as u
        from grass.pygrass.gis.region import Region
        from grass.pygrass.vector import Vector
        from grass.pygrass.vector import VectorTopo
        from grass.pygrass.vector.geometry import Point

        # Build attribute table
        # Define columns for attribute table
        cols = [(u'cat', 'INTEGER PRIMARY KEY')]
        for b in band_types.keys():
            cols.append((b.replace('.', '_'), band_types[b]))

        # Get region information
        reg = Region()

        # Create vector map
        new = Vector(endmembers)
        new.open('w', tab_name=endmembers, tab_cols=cols)

        cat = 1
        for e in E:
            # Get indices
            idx = np.where((img[:, :] == e).all(-1))

            # Numpy array is ordered rows, columns (y, x)
            if len(idx[0]) == 0 or len(idx[1]) == 0:
                gs.warning('Could not compute coordinates for endmember {}. '
                           'Please consider rescaling your data to '
                           'integer'.format(cat))
                cat = cat + 1
                continue
            coords = u.pixel2coor((idx[1][0], idx[0][0]), reg)
            point = Point(coords[1] + reg.ewres / 2.0,
                          coords[0] - reg.nsres / 2.0)

            # Get attributes
            n = 0
            attr = []
            for b in band_types.keys():
                if band_types[b] == u'INTEGER':
                    attr.append(int(e[n]))
                else:
                    attr.append(float(e[n]))
                n = n + 1

            # Write geometry with attributes
            new.write(point, cat=cat, attrs=tuple(attr))
            cat = cat + 1

        # Close vector map
        new.table.conn.commit()
        new.close(build=True)

    if prefix:
        # Run spectral unmixing
        import pysptools.abundance_maps as amaps
        if unmixing_method == 'FCLS':
            fcls = amaps.FCLS()
            result = fcls.map(img, E, normalize=False, mask=mask)
        elif unmixing_method == 'NNLS':
            nnls = amaps.NNLS()
            result = nnls.map(img, E, normalize=False, mask=mask)
        elif unmixing_method == 'UCLS':
            ucls = amaps.UCLS()
            result = ucls.map(img, E, normalize=False, mask=mask)

        # Write results
        for l in range(endmember_n):
            rastname = '{0}_{1}'.format(prefix, l + 1)
            r.numpy2raster(result[:, :, l], 'FCELL', rastname)
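# Hedged usage sketch for the module whose main() is shown above. The module
# name (i.pysptools.unmix) and the option names are inferred from the option
# parsing in the code; the imagery group, output file and map prefixes are
# placeholders.
import grass.script as gs

gs.run_command(
    "i.pysptools.unmix",           # module name inferred from the code above
    input="landsat_group",         # placeholder imagery group
    endmember_n=4,
    extraction_method="NFINDR",
    unmixing_method="FCLS",
    output="spectra.txt",          # spectra file for i.spec.unmix
    prefix="fraction",             # prefix for abundance rasters
    endmembers="endmember_points", # optional vector map of endmembers
)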
def is_current(self):
    """Check if the MAPSET is the working MAPSET"""
    return (self.name == getenv("MAPSET") and
            self.location == getenv("LOCATION_NAME") and
            self.gisdbase == getenv("GISDBASE"))
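# Usage sketch for is_current() on the pygrass Mapset class (requires an
# active GRASS session; the mapset name below is a placeholder).
from grass.pygrass.gis import Mapset

current = Mapset()               # defaults to the session's current mapset
print(current.is_current())      # True inside the running session
other = Mapset("PERMANENT")
print(other.is_current())        # True only if PERMANENT is the active mapset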
def open(self, mode=None, layer=1, overwrite=None, with_z=None,
         # parameters valid only if mode == 'w'
         tab_name='', tab_cols=None, link_name=None, link_key='cat',
         link_db='$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db',
         link_driver='sqlite'):
    """Open a Vector map.

    :param mode: open a vector map in ``r`` (read), ``w`` (write) or
                 ``rw`` (read and write) mode
    :type mode: str
    :param layer: specify the layer that you want to use
    :type layer: int
    :param overwrite: valid only for ``w`` mode
    :type overwrite: bool
    :param with_z: specify if the vector map must be opened with the third
                   dimension enabled or not. Valid only for ``w`` mode,
                   default: False
    :type with_z: bool
    :param tab_name: define the name of the table that will be generated
    :type tab_name: str
    :param tab_cols: define the name and type of the columns of the
                     attribute table of the vector map
    :type tab_cols: list of pairs
    :param link_name: define the name of the link connection with the
                      database
    :type link_name: str
    :param link_key: define the name of the column that will be used as
                     vector category
    :type link_key: str
    :param link_db: define the database connection parameters
    :type link_db: str
    :param link_driver: define which database driver will be used
    :type link_driver: str

    Some of the parameters are valid only with mode ``w`` or ``rw``

    See more examples in the documentation of the ``read`` and ``write``
    methods
    """
    with_z = libvect.WITH_Z if with_z else libvect.WITHOUT_Z
    # check if map exists or not
    if not self.exist() and mode != 'w':
        raise OpenError("Map <%s> not found." % self._name)
    if libvect.Vect_set_open_level(self._topo_level) != 0:
        raise OpenError("Invalid access level.")
    # update the overwrite attribute
    self.overwrite = overwrite if overwrite is not None else self.overwrite
    # check if the mode is valid
    if mode not in ('r', 'rw', 'w'):
        raise ValueError("Mode not supported. Use one of: 'r', 'rw', 'w'.")

    # check if the map exist
    if self.exist() and mode in ('r', 'rw'):
        # open in READ mode
        if mode == 'r':
            openvect = libvect.Vect_open_old2(self.c_mapinfo, self.name,
                                              self.mapset, str(layer))
        # open in READ and WRITE mode
        elif mode == 'rw':
            openvect = libvect.Vect_open_update2(self.c_mapinfo, self.name,
                                                 self.mapset, str(layer))

        # instantiate class attributes
        self.dblinks = DBlinks(self.c_mapinfo)

    # If it is opened in write mode
    if mode == 'w':
        openvect = libvect.Vect_open_new(self.c_mapinfo, self.name, with_z)
        self.dblinks = DBlinks(self.c_mapinfo)

    if mode in ('w', 'rw') and tab_cols:
        # create a link
        link = Link(layer,
                    link_name if link_name else self.name,
                    tab_name if tab_name else self.name,
                    link_key, link_db, link_driver)
        # add the new link
        self.dblinks.add(link)
        # create the table
        table = link.table()
        table.create(tab_cols, overwrite=overwrite)
        table.conn.commit()

    # check the C function result.
    if openvect == -1:
        str_err = "Not able to open the map, C function return %d."
        raise OpenError(str_err % openvect)

    if len(self.dblinks) == 0:
        self.layer = layer
        self.table = None
        self.n_lines = 0
    else:
        self.layer = self.dblinks.by_layer(layer).layer
        self.table = self.dblinks.by_layer(layer).table()
        self.n_lines = self.table.n_rows()
    self.writeable = self.mapset == utils.getenv("MAPSET")
    # Initialize the finder
    self.find = {'by_point': PointFinder(self.c_mapinfo, self.table,
                                         self.writeable),
                 'by_bbox': BboxFinder(self.c_mapinfo, self.table,
                                       self.writeable),
                 'by_polygon': PolygonFinder(self.c_mapinfo, self.table,
                                             self.writeable), }
    self.find_by_point = self.find["by_point"]
    self.find_by_bbox = self.find["by_bbox"]
    self.find_by_polygon = self.find["by_polygon"]
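# Usage sketch for open() in write mode with an attribute table, mirroring the
# pattern used in the unmixing script above (requires an active GRASS session;
# the map name, columns and coordinates are placeholders).
from grass.pygrass.vector import VectorTopo
from grass.pygrass.vector.geometry import Point

cols = [('cat', 'INTEGER PRIMARY KEY'), ('label', 'TEXT')]
new = VectorTopo('demo_points')
new.open('w', tab_name='demo_points', tab_cols=cols, overwrite=True)
new.write(Point(635818.8, 221342.4), cat=1, attrs=('first point',))
new.table.conn.commit()
new.close(build=True)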