def mask_data(band_filter, cfmask_filter, cloud_mask_value, file_separator):
    # do cleanup first
    grass.run_command('g.remove', type='raster', pattern='*_masked', flags='f', quiet=True)
    
    # find band1 raster maps first
    bands1 = grass.list_grouped('raster', pattern='*{}1*'.format(band_filter))[mapset]
    count = len(bands1)
    i = 0
    for b1 in bands1:
        i += 1
        basename = b1.split(file_separator)[0]
        grass.message("Processing <{0}> ({1}/{2})...".format(basename, i, count))
        grass.percent(i, count, 5)
        # set computational region based on first band
        grass.run_command('g.region', raster=b1)
        maskname = '{}{}{}'.format(basename, file_separator, cfmask_filter)
        mask = grass.find_file(maskname, element='raster')['fullname']
        # apply cloud mask if found
        if mask:
            grass.run_command('r.mask', flags='i', raster=maskname, maskcats=cloud_mask_value, overwrite=True, quiet=True)
        else:
            grass.warning("Mask missing for <{}>".format(basename))
        # create copy of band with mask applied
        bands = grass.list_grouped('raster', pattern='{}{}{}*'.format(basename, file_separator, band_filter))[mapset]
        for b in bands:
            grass.mapcalc('{name}_masked={name}'.format(name=b), quiet=True, overwrite=True)
            grass.run_command('r.colors', map='{}_masked'.format(b), color='grey.eq')
        # remove mask if applied
        if mask:
            grass.run_command('r.mask', flags='r', quiet=True)
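The snippets on this page reference several module-level names (grass, gscript, gs, Module, mapset, queue) that the excerpts omit. A minimal sketch of the assumed common setup, following standard GRASS Python scripting conventions:

import os
import copy
import datetime

import grass.script as grass
import grass.script as gscript  # alias used by some snippets (also gs)
from grass.pygrass.modules import Module, ParallelModuleQueue

# current mapset; grass.list_grouped() returns a dict keyed by mapset name
mapset = grass.gisenv()['MAPSET']

# module queue used by the snippets that run modules in parallel
queue = ParallelModuleQueue(nprocs=4)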
Example #2
def mask_data():
    bands1 = grass.list_grouped('raster', pattern='lndcal*.1$')[mapset]
    count = len(bands1)
    i = 0
    for b1 in bands1:
        i += 1
        oname = b1.split('.')[1]
        grass.message("Processing <{0}> ({1}/{2})...".format(oname, i, count))
        grass.percent(i, count, 5)
        grass.run_command('g.region', raster=b1)
        mask = grass.find_file('fmask.' + oname, element='raster')['fullname']
        if mask:
            grass.run_command('r.mask',
                              raster='fmask.' + oname,
                              maskcats=1,
                              overwrite=True,
                              quiet=True)
        else:
            grass.warning("Mask missing for <{0}>".format(oname))
        bands = grass.list_grouped(
            'raster', pattern='lndcal.{0}.*'.format(oname))[mapset]
        for b in bands:
            n = b.split('.')[-1]
            grass.mapcalc('{0}.{1}={2}'.format(oname, n, b),
                          quiet=True,
                          overwrite=True)
        if mask:
            grass.run_command('r.mask', flags='r', quiet=True)

    grass.run_command('g.remove',
                      type='raster',
                      pattern='fmask*,lndcal*',
                      flags='f',
                      quiet=True)
    grass.percent(0, 0, 0)
def import_data(directory, file_filter):
    # collect files to be imported
    files = []
    for f in os.listdir(directory):
        if f.endswith(".tif") and [x for x in file_filter if x in f]:
            files.append(f)

    # import selected files into GRASS
    count = len(files)
    i = 0
    for f in files:
        i += 1
        grass.message("Importing <{0}> ({1}/{2})...".format(f, i, count))
        grass.percent(i, count, 2)
        map_name = os.path.splitext(f)[0]
        ###imodule = 'r.external' # ~1sec
        imodule = 'r.in.gdal' # ~4sec
        grass.run_command(imodule, input=os.path.join(directory, f),
                          output=map_name, quiet=True, overwrite=True)
        # set color table for cfmask map
        if 'cfmask' in map_name:
            # 0 clear
            # 1 water
            # 2 shadow
            # 3 snow
            # 4 cloud
            colors = """0 black
1 blue
2 grey
3 white
4 149 186 224"""
            Module('r.colors', map=map_name, rules='-', quiet=True, stdin_=colors)
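For readers not using pygrass, the same rules can be fed through grass.script.write_command; a sketch with a hypothetical map name:

grass.write_command('r.colors', map='lndsr_20180315_cfmask',
                    rules='-', stdin=colors, quiet=True)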
def extract_data(input, output, cats, value):
    if len(cats) > 20000:
        newcol = "train_val_%s" % (os.getpid())
        columns_existing = grass.vector_columns(input).keys()
        if newcol not in columns_existing:
            grass.run_command("v.db.addcolumn",
                              map=input,
                              columns="%s INTEGER" % (newcol))
        n = 500
        for i in range(0, len(cats), n):
            cats_list = cats[i:i + n]
            grass.run_command(
                "v.db.update",
                where="cat IN (%s)" % (",".join(cats_list)),
                map=input,
                column=newcol,
                value=value,
                quiet=True,
            )
            grass.percent(i + n, len(cats), 1)
        grass.run_command(
            "v.extract",
            input=input,
            output=output,
            where="%s='%d'" % (newcol, value),
        )
    else:
        grass.run_command("v.extract",
                          input=input,
                          cats=",".join(cats),
                          output=output)
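A hedged usage sketch for extract_data; the map names are hypothetical, and cats is assumed to be a list of category strings such as those printed by v.category:

cats = grass.read_command('v.category', input='parcels',
                          option='print').splitlines()
extract_data(input='parcels', output='parcels_train', cats=cats, value=1)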
Example #5
    def _download(self):
        """!Downloads data from WCS server using GDAL WCS driver

        @return ret (exit code of r.in.gdal module)
        """
        self._debug("_download", "started")

        self.xml_file = self._createXML()
        self.vrt_file = self._createVRT()

        grass.message('Starting module r.in.gdal ...')

        env = os.environ.copy()
        env['GRASS_MESSAGE_FORMAT'] = 'gui'

        if self.params['location'] == "":
            p = grass.start_command('r.in.gdal',
                             input=self.vrt_file,
                             output=self.params['output'],
                             stdout = grass.PIPE,
                             stderr = grass.PIPE,
                             env = env
                                    )


        else:
            p = grass.start_command('r.in.gdal',
                     input=self.vrt_file,
                     output=self.params['output'],
                     location = self.params['location'],
                     stdout = grass.PIPE,
                     stderr=grass.PIPE,
                     env = env
                                    )

        # convert GRASS_INFO_PERCENT messages on stderr into progress output
        while p.poll() is None:
            line = p.stderr.readline()
            linepercent = line.replace('GRASS_INFO_PERCENT:', '').strip()
            if linepercent.isdigit():
                grass.percent(int(linepercent), 100, 1)
            else:
                grass.verbose(line)

        grass.percent(100, 100, 5)

        ret = p.wait()
        if ret != 0:
            grass.fatal('r.in.gdal for %s failed.' % self.vrt_file)
        else:
            grass.message('r.in.gdal was successful for new raster map %s ' % self.params['output'])

        grass.try_remove(self.vrt_file)
        grass.try_remove(self.xml_file)
        self._debug("_download", "finished")

        return ret
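With GRASS_MESSAGE_FORMAT set to 'gui', modules emit machine-readable progress lines such as GRASS_INFO_PERCENT: 42 on stderr, which the loop above converts back into grass.percent() calls. Note that under Python 3 the pipe yields bytes, so the readline() result would need decoding first, e.g.:

line = p.stderr.readline().decode('utf-8', errors='replace')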
Example #6
    def _predict_multi(self, estimator, region, indexes, class_labels, height,
                       func, output, overwrite):
        # determine output raster names, one per class label
        rasternames = [output + "_" + str(label) for label in class_labels]

        # create and open rasters for writing if incremental reading
        dst = []
        if height is not None:
            for i, rastername in enumerate(rasternames):
                dst.append(RasterRow(rastername))
                dst[i].open("w", mtype="FCELL", overwrite=overwrite)

            # create data reader generator
            n_windows = sum(1 for _ in self.row_windows(height=height))

            data_gen = (
                (wi, self.read(rows=rows))
                for wi, rows in enumerate(self.row_windows(height=height)))

        # perform prediction
        try:
            if height is not None:
                for wi, arr in data_gen:
                    gs.percent(wi, n_windows, 1)
                    result = func(arr, estimator)
                    result = np.ma.filled(result, np.nan)

                    # write multiple features to GRASS GIS rasters
                    for i, arr_index in enumerate(indexes):
                        for row in range(result.shape[1]):
                            newrow = Buffer((region.cols, ), mtype="FCELL")
                            newrow[:] = result[arr_index, row, :]
                            dst[i].put_row(newrow)
            else:
                arr = self.read()
                result = func(arr, estimator)
                result = np.ma.filled(result, np.nan)

                for i, arr_index in enumerate(indexes):
                    numpy2raster(
                        result[arr_index, :, :],
                        mtype="FCELL",
                        rastname=rasternames[i],
                        overwrite=overwrite,
                    )
        except Exception:
            gs.fatal("Error in raster prediction")

        finally:
            if height is not None:
                for i in dst:
                    i.close()

        return RasterStack(rasternames)
def timestamp_data():
    maps = grass.list_grouped('raster')[mapset]
    grass.message("Timestamping maps...")
    count = len(maps)
    i = 0
    for name in maps:
        i += 1
        grass.percent(i, count, 5)
        date = name.split('_')[-1].split('.')[0]
        grass_date = datetime.datetime.strptime(date, '%Y%m%d').strftime('%d %b %Y')
        grass.run_command('r.timestamp', map=name, date=grass_date)

    grass.percent(0, 0, 0)
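r.timestamp expects absolute dates in GRASS's day-month-year form, which is why the YYYYMMDD token parsed out of the map name is round-tripped through strptime/strftime. A quick check with a hypothetical map name:

name = 'ndvi_20180315.1'
date = name.split('_')[-1].split('.')[0]  # '20180315'
grass_date = datetime.datetime.strptime(date, '%Y%m%d').strftime('%d %b %Y')
print(grass_date)  # 15 Mar 2018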
Example #8
    def _download(self):
        """!Downloads data from WCS server using GDAL WCS driver

        @return ret (exit code of r.in.gdal module)
        """
        self._debug("_download", "started")
        self.xml_file = self._createXML()
        self.vrt_file = self._createVRT()

        gscript.message('Starting module r.in.gdal ...')
        
        if self.params['location'] == "":
            p = gscript.start_command('r.in.gdal',
                     input=self.vrt_file,
                     output=self.params['output'],
                     stderr = gscript.PIPE,
                     env = self._env
            )
        
        else:
            p = gscript.start_command('r.in.gdal',
                     input=self.vrt_file,
                     output=self.params['output'],
                     location = self.params['location'],
                     stderr = gscript.PIPE,
                     env = self._env
            )
        
        # percent status message
        while p.poll() is None:
            line = p.stderr.readline()
            linepercent = line.replace('GRASS_INFO_PERCENT:', '').strip()
            if linepercent.isdigit():
                gscript.percent(int(linepercent), 100, 1)
            else:
                gscript.verbose(line)

        gscript.percent(100, 100, 5)

        ret = p.wait()
        if ret != 0:
            gscript.fatal('r.in.gdal for %s failed.' % self.vrt_file)
        else:
            gscript.message('r.in.gdal was successful for new raster map %s' % self.params['output'])

        gscript.try_remove(self.vrt_file)
        gscript.try_remove(self.xml_file)
        self._debug("_download", "finished")

        return ret
Example #9
def import_data(directory):
    files = []
    for f in os.listdir(directory):
        if f.endswith(".tif"):
            files.append(f)

    count = len(files)
    i = 0
    for f in files:
        i += 1
        grass.message("Importing <{0}> ({1}/{2})...".format(f, i, count))
        grass.percent(i, count, 5)
        grass.run_command('r.in.gdal', input=os.path.join(directory, f),
                          output=os.path.splitext(f)[0], quiet=True, overwrite=True)
    grass.percent(0, 0, 0)
def mask_data(band_filter, cfmask_filter, cloud_mask_value, file_separator):
    # do cleanup first
    Module('g.remove', type='raster', pattern='*_masked', flags='f', quiet=True)
    
    # find band1 raster maps first
    bands1 = grass.list_grouped('raster', pattern='*{}1*'.format(band_filter))[mapset]

    mapcalc_module = Module('r.mapcalc', quiet=True, overwrite=True, run_=False)
    color_module = Module('r.colors', color='grey.eq', quiet=True, run_=False)
    
    i = 0
    count = len(bands1)
    for b1 in bands1:
        i += 1
        basename = b1.split(file_separator)[0]
        grass.message("Processing <{0}> ({1}/{2})...".format(basename, i, count))
        grass.percent(i, count, 5)
        
        # set computational region based on first band (ignore NULL values)
        Module('g.region', raster=b1, zoom=b1)
        maskname = '{}{}{}'.format(basename, file_separator, cfmask_filter)
        mask = grass.find_file(maskname, element='raster')['fullname']

        # apply cloud mask if found
        if mask:
            Module('r.mask', flags='i', raster=maskname, maskcats=cloud_mask_value, overwrite=True, quiet=True)
        else:
            grass.warning("Mask missing for <{}>".format(basename))

        # get list of bands
        bands = grass.list_grouped('raster', pattern='{}{}{}*'.format(basename, file_separator, band_filter))[mapset]
        
        # create copy of bands with mask applied (using r.mapcalc)
        for b in bands:
            new_mapcalc = copy.deepcopy(mapcalc_module)
            queue.put(new_mapcalc(expression='{name}_masked={name}'.format(name=b)))
        queue.wait()

        # set color table to grey.eq for masked bands
        masked_bands = grass.list_grouped('raster', pattern='*_masked')[mapset]
        for b in masked_bands:
            new_color = copy.deepcopy(color_module)
            queue.put(new_color(map=b))
        queue.wait()

        # remove mask if applied
        if mask:
            Module('r.mask', flags='r', quiet=True)
Example #13
def timestamp_data(directory, file_separator):
    # process metadata first
    file_datetime = process_metadata(directory)

    maps = grass.list_grouped('raster', pattern='*_masked')[mapset]
    grass.message("Timestamping masked maps...")
    count = len(maps)
    i = 0
    for name in maps:
        i += 1
        grass.percent(i, count, 5)
        basename = name.split(file_separator)[0]
        try:
            grass_date = file_datetime[basename]
        except KeyError:
            grass.warning("No timestamp available for <{}>".format(basename))
            continue
        grass.run_command('r.timestamp', map=name, date=grass_date)

    grass.percent(0, 0, 0)
Example #16
def main():
    strds = options["input"]
    where = options["where"]
    nprocs = int(options["nprocs"])

    nullmod = pymod.Module("r.null")
    nullmod.flags.quiet = True
    if options["null"]:
        nullmod.inputs.null = options["null"]
    elif options["setnull"]:
        nullmod.inputs.setnull = options["setnull"]
    else:
        gscript.fatal(_("Please set 'null' or 'setnull' option"))

    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where, "start_time", None)
    if maps is None:
        gscript.fatal(
            _("Space time raster dataset {st} seems to be "
              "empty").format(st=strds))
    # module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)

    for mapp in maps:
        count += 1
        mod = copy.deepcopy(nullmod)
        mod.inputs.map = mapp.get_id()
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()
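The deepcopy-then-put pattern above is the standard way to reuse one preconfigured pygrass Module across a ParallelModuleQueue; a minimal standalone sketch with hypothetical map names:

import copy
from grass.pygrass.modules import Module, ParallelModuleQueue

queue = ParallelModuleQueue(nprocs=4)
nullmod = Module('r.null', run_=False, quiet=True, setnull='0')
for name in ['map_a', 'map_b', 'map_c']:
    mod = copy.deepcopy(nullmod)  # each queued job needs its own copy
    mod.inputs.map = name
    queue.put(mod)                # runs asynchronously
queue.wait()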
Example #17
def import_data(directory, file_filter):
    # collect files to be imported
    files = []
    for f in os.listdir(directory):
        if f.endswith(".tif") and [x for x in file_filter if x in f]:
            files.append(f)

    # import selected files into GRASS
    count = len(files)
    i = 0
    for f in files:
        i += 1
        grass.message("Importing <{0}> ({1}/{2})...".format(f, i, count))
        grass.percent(i, count, 2)
        map_name = os.path.splitext(f)[0]
        ###imodule = 'r.external' # ~1sec
        imodule = 'r.in.gdal'  # ~4sec
        grass.run_command(imodule,
                          input=os.path.join(directory, f),
                          output=map_name,
                          quiet=True,
                          overwrite=True)
        # set color table for cfmask map
        if 'cfmask' in map_name:
            # 0 clear
            # 1 water
            # 2 shadow
            # 3 snow
            # 4 cloud
            colors = """0 black
1 blue
2 grey
3 white
4 149 186 224"""
            Module('r.colors',
                   map=map_name,
                   rules='-',
                   quiet=True,
                   stdin_=colors)
def import_data(directory, file_filter):
    # collect files to be imported
    files = []
    for f in os.listdir(directory):
        if f.endswith(".tif") and [x for x in file_filter if x in f]:
            files.append(f)

    # import selected files into GRASS
    imodule = 'r.external'
    ###imodule = 'r.in.gdal'
    import_module = Module(imodule, quiet=True, overwrite=True, run_=False)
    i = 0
    count = len(files)
    for f in files:
        i += 1
        grass.message("Importing <{0}> ({1}/{2})...".format(f, i, count))
        grass.percent(i, count, 2)
        new_import = copy.deepcopy(import_module)
        map_name = os.path.splitext(f)[0]
        queue.put(new_import(input=os.path.join(directory, f), output=map_name))
    queue.wait()

    # set color table for cfmask map
    # 0 clear
    # 1 water
    # 2 shadow
    # 3 snow
    # 4 cloud
    colors = """0 black
1 blue
2 grey
3 white
4 149 186 224"""
    color_module = Module('r.colors', rules='-', quiet=True, stdin_=colors, run_=False)
    cfmask_maps = grass.list_grouped('raster', pattern='*cfmask*')[mapset]
    for map_name in cfmask_maps:
        new_color = copy.deepcopy(color_module)
        queue.put(new_color(map=map_name))
    queue.wait()
Example #19
def create_strds_register_maps(in_strds, out_strds, out_maps, register_null,
                               empty_maps, dbif):

    out_id = out_strds.get_id()

    if out_strds.is_in_db(dbif):
        if grass.overwrite():
            out_strds.delete(dbif)
            out_strds = in_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = \
        in_strds.get_initial_values()
    out_strds.set_initial_values(temporal_type, semantic_type, title,
                                 description)
    out_strds.insert(dbif)

    # Register the maps in the database
    count = 0
    for map in out_maps.values():
        count += 1
        if count % 10 == 0:
            grass.percent(count, len(out_maps), 1)
        # Read the raster map data
        map.load()
        # In case of an empty map, continue; do not register empty maps
        if not register_null:
            if map.metadata.get_min() is None and \
                map.metadata.get_max() is None:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        out_strds.register_map(map, dbif)

    out_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)
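A hedged sketch of the temporal-framework setup this helper expects, mirroring the tgis calls used elsewhere on this page; dataset names are hypothetical, and out_maps is assumed to be a dict of raster map objects built by the caller:

import grass.temporal as tgis

tgis.init()
dbif = tgis.SQLDatabaseInterfaceConnection()
dbif.connect()
in_strds = tgis.open_old_stds('ndvi', 'strds', dbif)
out_strds = tgis.check_new_stds('ndvi_out', 'strds', dbif=dbif,
                                overwrite=grass.overwrite())
# create_strds_register_maps(in_strds, out_strds, out_maps,
#                            register_null=False, empty_maps=[], dbif=dbif)
dbif.close()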
Example #22
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    register_null = flags["n"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the HANTS module
    hants_flags = ""
    if flags["l"]:
        hants_flags = hants_flags + 'l'
    if flags["h"]:
        hants_flags = hants_flags + 'h'
    if flags["i"]:
        hants_flags = hants_flags + 'i'

    kwargs = dict()
    kwargs['nf'] = options['nf']
    if options['fet']:
        kwargs['fet'] = options['fet']
    kwargs['dod'] = options['dod']
    if options['range']:
        kwargs['range'] = options['range']
    kwargs['suffix'] = "_hants"
    if len(hants_flags) > 0:
        kwargs['flags'] = hants_flags

    count = 0
    num_maps = len(maps)
    new_maps = []

    maplistfile = script.tempfile()
    fd = open(maplistfile, 'w')

    # create list of input maps and their time stamps
    for map in maps:
        count += 1
        map_name = "{ba}_hants".format(ba=map.get_id())

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        fd.write("{0}\n".format(map.get_id()))

    fd.close()

    # run r.hants (the suffix option is already included in kwargs)
    grass.run_command('r.hants',
                      file=maplistfile,
                      quiet=True,
                      **kwargs)

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ",".join(m.get_name() for m in empty_maps)

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
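r.hants reads its input maps from a text file, one map name per line, which is what the maplist file above provides; a minimal standalone sketch with hypothetical map names (options file, nf and suffix as used above):

maps = ['ndvi_2018_01', 'ndvi_2018_02', 'ndvi_2018_03']
maplistfile = grass.tempfile()
with open(maplistfile, 'w') as fd:
    for name in maps:
        fd.write('{}\n'.format(name))
grass.run_command('r.hants', file=maplistfile, nf=3, suffix='_hants')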
Example #23
def main():
    in_vector = options["input"].split("@")[0]
    if len(options["input"].split("@")) > 1:
        in_mapset = options["input"].split("@")[1]
    else:
        in_mapset = None
    raster_maps = options["raster"].split(
        ",")  # raster file(s) to extract from
    output = options["output"]
    methods = tuple(options["methods"].split(","))
    percentile = (None if options["percentile"] == "" else map(
        float, options["percentile"].split(",")))
    column_prefix = tuple(options["column_prefix"].split(","))
    buffers = options["buffers"].split(",")
    types = options["type"].split(",")
    layer = options["layer"]
    sep = options["separator"]
    update = flags["u"]
    tabulate = flags["t"]
    percent = flags["p"]
    remove = flags["r"]
    use_label = flags["l"]

    empty_buffer_warning = (
        "No data in raster map {} within buffer {} around geometry {}")

    # Do checks using pygrass
    for rmap in raster_maps:
        r_map = RasterAbstractBase(rmap)
        if not r_map.exist():
            grass.fatal("Could not find raster map {}.".format(rmap))

    user_mask = False
    m_map = RasterAbstractBase("MASK", Mapset().name)
    if m_map.exist():
        grass.warning("Current MASK is temporarily renamed.")
        user_mask = True
        unset_mask()

    invect = VectorTopo(in_vector)
    if not invect.exist():
        grass.fatal("Vector file {} does not exist".format(in_vector))

    if output:
        if output == "-":
            out = None
        else:
            out = open(output, "w")

    # Check if input map is in current mapset (and thus editable)
    if in_mapset and str(in_mapset) != str(Mapset()):
        grass.fatal(
            "Input vector map is not in the current mapset and cannot be "
            "modified. Please consider copying it to the current mapset.")

    buffers = []
    for buf in options["buffers"].split(","):
        try:
            b = float(buf)
            if b.is_integer():
                buffers.append(int(b))
            else:
                buffers.append(b)
        except ValueError:
            grass.fatal("Invalid buffer distance: {}".format(buf))
        if b < 0:
            grass.fatal("Negative buffer distance not supported!")

    ### Define column types depending on statistic, map type and
    ### DB backend (SQLite supports only double and not real)
    # int: statistic always produces integer precision
    # double: statistic always produces floating point precision
    # map_type: precision of statistic depends on map type
    int_dict = {
        "number": (0, "int", "n"),
        "number_null": (1, "int", "null_cells"),
        "minimum": (3, "map_type", "min"),
        "maximum": (4, "map_type", "max"),
        "range": (5, "map_type", "range"),
        "average": (6, "double", "mean"),
        "average_abs": (7, "double", "mean_of_abs"),
        "stddev": (8, "double", "stddev"),
        "variance": (9, "double", "variance"),
        "coeff_var": (10, "double", "coeff_var"),
        "sum": (11, "map_type", "sum"),
        "first_quartile": (12, "map_type", "first_quartile"),
        "median": (13, "map_type", "median"),
        "third_quartile": (14, "map_type", "third_quartile"),
        "percentile": (15, "map_type", "percentile"),
    }

    if len(raster_maps) != len(column_prefix):
        grass.fatal(
            "Number of maps and number of column prefixes has to be equal!")

    # Generate list of required column names and types
    col_names = []
    valid_labels = []
    col_types = []
    for p in column_prefix:
        rmaptype, val_lab, rcats = raster_type(
            raster_maps[column_prefix.index(p)], tabulate, use_label)
        valid_labels.append(val_lab)

        for b in buffers:
            b_str = str(b).replace(".", "_")
            if tabulate:
                if rmaptype == "double precision":
                    grass.fatal(
                        "{} has floating point precision. Can only tabulate integer maps"
                        .format(raster_maps[column_prefix.index(p)]))
                col_names.append("{}_{}_b{}".format(p, "ncats", b_str))
                col_types.append("int")
                col_names.append("{}_{}_b{}".format(p, "mode", b_str))
                col_types.append("int")
                col_names.append("{}_{}_b{}".format(p, "null", b_str))
                col_types.append("double precision")
                col_names.append("{}_{}_b{}".format(p, "area_tot", b_str))
                col_types.append("double precision")

                for rcat in rcats:
                    if use_label and valid_labels:
                        rcat = rcat[0].replace(" ", "_")
                    else:
                        rcat = rcat[1]
                    col_names.append("{}_{}_b{}".format(p, rcat, b_str))
                    col_types.append("double precision")
            else:
                for m in methods:
                    col_names.append("{}_{}_b{}".format(
                        p, int_dict[m][2], b_str))
                    col_types.append(rmaptype if int_dict[m][1] ==
                                     "map_type" else int_dict[m][1])
                if percentile:
                    for perc in percentile:
                        col_names.append("{}_percentile_{}_b{}".format(
                            p,
                            int(perc) if (perc).is_integer() else perc, b_str))
                        col_types.append(rmaptype if int_dict[m][1] ==
                                         "map_type" else int_dict[m][1])

    # Open input vector map
    in_vect = VectorTopo(in_vector, layer=layer)
    in_vect.open(mode="r")

    # Get name for temporary map
    global TMP_MAPS
    TMP_MAPS.append(tmp_map)

    # Setup stats collectors
    if tabulate:
        # Collector for raster category statistics
        stats = Module("r.stats", run_=False, stdout_=PIPE)
        stats.inputs.sort = "desc"
        stats.inputs.null_value = "null"
        stats.flags.quiet = True
        stats.flags.l = True
        if percent:
            stats.flags.p = True
            stats.flags.n = True
        else:
            stats.flags.a = True
    else:
        # Collector for univariat statistics
        univar = Module("r.univar", run_=False, stdout_=PIPE)
        univar.inputs.separator = sep
        univar.flags.g = True
        univar.flags.quiet = True

        # Add extended statistics if requested
        if set(methods).intersection(
                set(["first_quartile", "median", "third_quartile"])):
            univar.flags.e = True

        if percentile is not None:
            univar.flags.e = True
            univar.inputs.percentile = percentile

    # Check if attribute table exists
    if not output:
        if not in_vect.table:
            grass.fatal(
                "No attribute table found for vector map {}".format(in_vect))

        # Modify table as needed
        tab = in_vect.table
        tab_name = tab.name
        tab_cols = tab.columns

        # Add required columns
        existing_cols = list(set(tab_cols.names()).intersection(col_names))
        if len(existing_cols) > 0:
            if not update:
                in_vect.close()
                grass.fatal(
                    "Column(s) {} already exist! Please use the u-flag \
                            if you want to update values in those columns".
                    format(",".join(existing_cols)))
            else:
                grass.warning("Column(s) {} already exist!".format(
                    ",".join(existing_cols)))
        for e in existing_cols:
            idx = col_names.index(e)
            del col_names[idx]
            del col_types[idx]
        tab_cols.add(col_names, col_types)

        conn = tab.conn
        cur = conn.cursor()

        sql_str_start = "UPDATE {} SET ".format(tab_name)

    elif output == "-":
        print("cat{0}raster_map{0}buffer{0}statistic{0}value".format(sep))
    else:
        out.write("cat{0}raster_map{0}buffer{0}statistic{0}value{1}".format(
            sep, os.linesep))

    # Get computational region
    grass.use_temp_region()
    r = Region()
    r.read()

    # Adjust region extent to buffer around geometry
    # reg = deepcopy(r)

    # Create iterator for geometries of all selected types
    geoms = chain()
    geoms_n = 0
    n_geom = 1
    for geom_type in types:
        geoms_n += in_vect.number_of(geom_type)
        if in_vect.number_of(geom_type) > 0:
            # chain with the previous iterator so all types are covered
            geoms = chain(geoms, in_vect.viter(geom_type))

    # Loop over geometries
    for geom in geoms:
        # Get cat
        cat = geom.cat

        # Add where clause to UPDATE statement
        sql_str_end = " WHERE cat = {};".format(cat)

        # Loop over user-provided buffer distances
        for buf in buffers:
            b_str = str(buf).replace(".", "_")
            # Buffer geometry
            if buf <= 0:
                buffer_geom = geom
            else:
                buffer_geom = geom.buffer(buf)
            # Create temporary vector map with buffered geometry
            tmp_vect = VectorTopo(tmp_map, quiet=True)
            tmp_vect.open(mode="w")
            tmp_vect.write(Boundary(points=buffer_geom[0].to_list()))
            # , c_cats=int(cat), set_cats=True
            if callable(buffer_geom[1]):
                tmp_vect.write(Centroid(x=buffer_geom[1]().x,
                                        y=buffer_geom[1]().y),
                               cat=int(cat))
            else:
                tmp_vect.write(Centroid(x=buffer_geom[1].x,
                                        y=buffer_geom[1].y),
                               cat=int(cat))

            #################################################
            # How to silence VectorTopo???
            #################################################

            # Save current stdout
            # original = sys.stdout

            # f = open(os.devnull, 'w')
            # with open('output.txt', 'w') as f:
            # sys.stdout = io.BytesIO()
            # sys.stdout.fileno() = os.devnull
            # sys.stderr = f
            # os.environ.update(dict(GRASS_VERBOSE='0'))
            tmp_vect.close(build=False)
            grass.run_command("v.build", map=tmp_map, quiet=True)
            # os.environ.update(dict(GRASS_VERBOSE='1'))

            # reg = Region()
            # reg.read()
            # r.from_vect(tmp_map)
            r = align_current(r, buffer_geom[0].bbox())
            r.write()

            # Check if the following is needed
            # needed specially with r.stats -p
            # grass.run_command('g.region', vector=tmp_map, flags='a')

            # Create a MASK from buffered geometry
            if user_mask:
                grass.run_command(
                    "v.to.rast",
                    input=tmp_map,
                    output=tmp_map,
                    use="val",
                    value=int(cat),
                    quiet=True,
                )
                mc_expression = (
                    "MASK=if(!isnull({0}) && !isnull({0}_MASK), {1}, null())".
                    format(tmp_map, cat))
                grass.run_command("r.mapcalc",
                                  expression=mc_expression,
                                  quiet=True)
            else:
                grass.run_command(
                    "v.to.rast",
                    input=tmp_map,
                    output="MASK",
                    use="val",
                    value=int(cat),
                    quiet=True,
                )

            # reg.write()

            updates = []
            # Compute statistics for every raster map
            for rm, rmap in enumerate(raster_maps):
                # rmap = raster_maps[rm]
                prefix = column_prefix[rm]

                if tabulate:
                    # Get statistics on occurrence of raster categories within buffer
                    stats.inputs.input = rmap
                    stats.run()
                    t_stats = (stats.outputs["stdout"].value.rstrip(
                        os.linesep).replace("  ", " ").replace(
                            "no data", "no_data").replace(
                                " ",
                                "_b{} = ".format(b_str)).split(os.linesep))
                    if t_stats == [""]:
                        grass.warning(
                            empty_buffer_warning.format(rmap, buf, cat))
                        continue
                    if (t_stats[0].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1] !=
                            "null"):
                        mode = (t_stats[0].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1])
                    elif len(t_stats) == 1:
                        mode = "NULL"
                    else:
                        mode = (t_stats[1].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1])

                    if not output:
                        updates.append("\t{}_{}_b{} = {}".format(
                            prefix, "ncats", b_str, len(t_stats)))
                        updates.append("\t{}_{}_b{} = {}".format(
                            prefix, "mode", b_str, mode))

                        area_tot = 0
                        for l in t_stats:
                            # check if raster maps has category or not
                            if len(l.split("=")) == 2:
                                updates.append("\t{}_{}".format(
                                    prefix, l.rstrip("%")))
                            elif not l.startswith("null"):
                                vals = l.split("=")
                                updates.append("\t{}_{} = {}".format(
                                    prefix,
                                    vals[-2].strip()
                                    if valid_labels[rm] else vals[0].strip(),
                                    vals[-1].strip().rstrip("%"),
                                ))
                            if not l.startswith("null"):
                                area_tot += float(
                                    l.rstrip("%").split("= ")[-1])
                        if not percent:
                            updates.append("\t{}_{}_b{} = {}".format(
                                prefix, "area_tot", b_str, area_tot))

                    else:
                        out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                            sep, cat, prefix, buf, "ncats", len(t_stats),
                            os.linesep)
                        out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                            sep, cat, prefix, buf, "mode", mode, os.linesep)
                        area_tot = 0
                        for l in t_stats:
                            rcat = (l.split("= ")[1].rstrip(
                                "_b{} = ".format(b_str))
                                    if valid_labels[rm] else l.split("_")[0])
                            area = l.split("= ")[-1]
                            out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                "area {}".format(rcat),
                                area,
                                os.linesep,
                            )
                            if rcat != "null":
                                area_tot = area_tot + float(
                                    l.rstrip("%").split("= ")[-1])
                        if not percent:
                            out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                "area total",
                                area_tot,
                                os.linesep,
                            )

                        if output == "-":
                            print(out_str.rstrip(os.linesep))
                        else:
                            out.write(out_str)

                else:
                    # Get univariate statistics within buffer
                    univar.inputs.map = rmap
                    univar.run()
                    u_stats = (univar.outputs["stdout"].value.rstrip(
                        os.linesep).replace(
                            "=", "_b{} = ".format(b_str)).split(os.linesep))

                    # Test if u_stats is empty and give warning
                    # Needs to be adjusted to number of requested stats?
                    if ((percentile and len(u_stats) < 14)
                            or (univar.flags.e and len(u_stats) < 13)
                            or len(u_stats) < 12):
                        grass.warning(
                            empty_buffer_warning.format(rmap, buf, cat))
                        break

                    # Extract statistics for selected methods
                    for m in methods:
                        if not output:
                            # Add to list of UPDATE statements
                            updates.append("\t{}_{}".format(
                                prefix,
                                u_stats[int_dict[m][0]] if is_number(
                                    u_stats[int_dict[m][0]].split(" = ")[1])
                                else " = ".join([
                                    u_stats[int_dict[m][0]].split(" = ")[0],
                                    "NULL",
                                ]),
                            ))
                        else:
                            out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                m,
                                u_stats[int_dict[m][0]].split("= ")[1],
                            )
                            if output == "-":
                                print(out_str)
                            else:
                                out.write("{}{}".format(out_str, os.linesep))

                    if percentile:
                        perc_count = 0
                        for perc in percentile:
                            if not output:
                                updates.append(
                                    "{}_percentile_{}_b{} = {}".format(
                                        prefix,
                                        int(perc) if
                                        (perc).is_integer() else perc,
                                        b_str,
                                        u_stats[15 +
                                                perc_count].split("= ")[1],
                                    ))
                            else:
                                out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format(
                                    sep,
                                    cat,
                                    prefix,
                                    buf,
                                    "percentile_{}".format(
                                        int(perc) if (
                                            perc).is_integer() else perc),
                                    u_stats[15 + perc_count].split("= ")[1],
                                )
                                if output == "-":
                                    print(out_str)
                                else:
                                    out.write(out_str)
                            perc_count = perc_count + 1

            if not output and len(updates) > 0:
                cur.execute("{}{}{}".format(sql_str_start, ",\n".join(updates),
                                            sql_str_end))

            # Remove temporary maps
            # , stderr=os.devnull, stdout_=os.devnull)
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name="MASK",
                              quiet=True)
            grass.run_command("g.remove",
                              flags="f",
                              type="vector",
                              name=tmp_map,
                              quiet=True)

        # Give progress information
        grass.percent(n_geom, geoms_n, 1)
        n_geom = n_geom + 1

        if not output:
            conn.commit()

    # Close cursor and DB connection
    if not output and not output == "-":
        cur.close()
        conn.close()
        # Update history
        grass.vector.vector_history(in_vector)
    elif output != "-":
        # write results to file
        out.close()

    if remove and not output:
        dropcols = []
        selectnum = "select count({}) from {}"
        for i in col_names:
            thisrow = grass.read_command("db.select",
                                         flags="c",
                                         sql=selectnum.format(i, in_vector))
            if int(thisrow) == 0:
                dropcols.append(i)
        grass.debug("Columns to delete: {}".format(", ".join(dropcols)),
                    debug=2)
        if dropcols:
            grass.run_command("v.db.dropcolumn",
                              map=in_vector,
                              columns=dropcols)
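A hedged invocation sketch for the module implemented by this main(); the option and flag names are taken from the options[]/flags[] reads above, the module name (apparently the v.rast.bufferstats addon) is an assumption, and the map names are hypothetical:

v.rast.bufferstats input=plots raster=elevation,landuse \
    column_prefix=elev,lu methods=average,stddev buffers=100,500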
Example #24
def main():
    # lazy imports
    import grass.temporal as tgis

    # Get the options
    name = options["input"]
    type = options["type"]
    title = options["title"]
    aggr_type = options["aggr_type"]
    description = options["description"]
    semantic = options["semantictype"]
    update = flags["u"]
    map_update = flags["m"]

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.open_old_stds(name, type, dbif)

    update = False
    if aggr_type and type == "stvds":
        return ()

    if aggr_type and type != "stvds":
        stds.metadata.set_aggregation_type(aggregation_type=aggr_type)
        update = True
    if title:
        stds.metadata.set_title(title=title)
        update = True
        # Update only non-null entries
    if description:
        stds.metadata.set_description(description=description)
        update = True
    if semantic:
        stds.base.set_semantic_type(semantic_type=semantic)
        update = True

    if update:
        stds.update(dbif=dbif)

    if map_update:
        # Update the registered maps from the grass spatial database
        statement = ""
        # This dict stores the datasets that must be updated
        dataset_dict = {}

        count = 0
        maps = stds.get_registered_maps_as_objects(dbif=dbif)

        # We collect the delete and update statements
        for map in maps:

            count += 1
            if count % 10 == 0:
                grass.percent(count, len(maps), 1)

            map.select(dbif=dbif)

            # Check if the map is present in the grass spatial database
            # Update if present, delete if not present
            if map.map_exists():
                # Read new metadata from the spatial database
                map.load()
                statement += map.update(dbif=dbif, execute=False)
            else:
                # Delete the map from the temporal database
                # We need to update all affected space time datasets
                datasets = map.get_registered_stds(dbif)
                if datasets:
                    for dataset in datasets:
                        dataset_dict[dataset] = dataset
                # Collect the delete statements
                statement += map.delete(dbif=dbif, update=False, execute=False)

        # Execute the collected SQL statements
        dbif.execute_transaction(statement)

        # Update the affected space time datasets
        for id in dataset_dict:
            stds_new = stds.get_new_instance(id)
            stds_new.select(dbif=dbif)
            stds_new.update_from_registered_maps(dbif=dbif)

    if map_update or update:
        stds.update_from_registered_maps(dbif=dbif)

    stds.update_command_string(dbif=dbif)

    dbif.close()
def main():
    """Do the main processing
    """

    # Parse input options:
    patch_map = options['input']
    patches = patch_map.split('@')[0]
    patches_mapset = patch_map.split('@')[1] if len(
        patch_map.split('@')) > 1 else None
    pop_proxy = options['pop_proxy']
    layer = options['layer']
    costs = options['costs']
    cutoff = float(options['cutoff'])
    border_dist = int(options['border_dist'])
    conefor_dir = options['conefor_dir']
    memory = int(options['memory'])

    # Parse output options:
    prefix = options['prefix']
    edge_map = '{}_edges'.format(prefix)
    vertex_map = '{}_vertices'.format(prefix)
    shortest_paths = '{}_shortest_paths'.format(prefix)

    # Parse flags:
    p_flag = flags['p']
    t_flag = flags['t']
    r_flag = flags['r']

    dist_flags = 'kn' if flags['k'] else 'n'

    lin_cat = 1
    zero_dist = None

    folder = grass.tempdir()
    if not os.path.exists(folder):
        os.makedirs(folder)

    # Setup counter for progress message
    counter = 0

    # Check if location is lat/lon (only in lat/lon geodesic distance
    # measuring is supported)
    if grass.locn_is_latlong():
        grass.verbose("Location is lat/lon: Geodesic distance \
                      measure is used")

    # Check if prefix is legal GRASS name
    if not grass.legal_name(prefix):
        grass.fatal('{} is not a legal name for GRASS \
                    maps.'.format(prefix))

    if prefix[0].isdigit():
        grass.fatal('Table names starting with a digit are not SQL \
                    compliant.')

    # Check if output maps not already exists or could be overwritten
    for output in [edge_map, vertex_map, shortest_paths]:
        if grass.db.db_table_exist(output) and not grass.overwrite():
            grass.fatal('Vector map <{}> already exists'.format(output))

    # Check if input has required attributes
    in_db_connection = grass.vector.vector_db(patch_map)
    if not int(layer) in in_db_connection.keys():
        grass.fatal('No attribute table connected to vector map {} at \
                    layer {}.'.format(patches, layer))

    # Get columns of the patch vector map
    pcols = grass.vector.vector_columns(patch_map, layer=layer)

    # Check if cat column exists
    if 'cat' not in pcols.keys():
        grass.fatal('Cannot find the required column cat in vector map \
                    {}.'.format(patches))

    #Check if pop_proxy column exists
    if not pop_proxy in pcols.keys():
        grass.fatal('Cannot find column {} in vector map \
                    {}'.format(pop_proxy, patches))

    #Check if pop_proxy column is numeric type
    if not pcols[pop_proxy]['type'] in ['INTEGER', 'REAL', 'DOUBLE PRECISION']:
        grass.fatal('Column {} is of type {}. Only numeric types \
                    (integer or double precision) \
                    allowed!'.format(pop_proxy, pcols[pop_proxy]['type']))

    #Check if pop_proxy column does not contain values <= 0
    pop_vals = np.fromstring(grass.read_command('v.db.select',
                                                flags='c',
                                                map=patches,
                                                columns=pop_proxy,
                                                nv=-9999).rstrip('\n'),
                             dtype=float,
                             sep='\n')

    if np.min(pop_vals) <= 0:
        grass.fatal('Column {} contains values <= 0 or NULL. Neither \
                    values <= 0 nor NULL allowed!}'.format(pop_proxy))

    ##############################################
    # Use pygrass region instead of grass.parse_command !?!
    start_reg = grass.parse_command('g.region', flags='ugp')

    max_n = float(start_reg['n'])
    min_s = float(start_reg['s'])
    max_e = float(start_reg['e'])
    min_w = float(start_reg['w'])
    # cost_nsres = reg['nsres']
    # cost_ewres = reg['ewres']

    # Rasterize patches
    # http://www.gdal.org/gdal_tutorial.html
    # http://geoinformaticstutorial.blogspot.no/2012/11/convert-
    # shapefile-to-raster-with-gdal.html
    if t_flag:
        # Rasterize patches with "all-touched" mode using GDAL
        # (region settings are taken from the parsed region above:
        # max_n, min_s, max_e, min_w, nsres, ewres)
        prast = os.path.join(folder, 'patches_rast.tif')

        # Check if GDAL-GRASS plugin is installed
        if ogr.GetDriverByName('GRASS'):
            #With GDAL-GRASS plugin
            #Locate file for patch vector map
            pfile = grass.parse_command('g.findfile',
                                        element='vector',
                                        file=patches,
                                        mapset=patches_mapset)['file']
            pfile = os.path.join(pfile, 'head')

        else:
            # Without GDAL-GRASS-plugin
            grass.warning("Cannot find GDAL-GRASS plugin. Consider \
                          installing it in order to save time for \
                          all-touched rasterisation")
            pfile = os.path.join(folder, 'patches_vect.gpkg')
            # Export patch vector map to a temporary GDAL-readable
            # file (GeoPackage)
            grass.run_command('v.out.ogr',
                              flags='m',
                              quiet=True,
                              input=patch_map,
                              type='area',
                              layer=layer,
                              output=pfile,
                              lco='GEOMETRY_NAME=geom')

        # Rasterize vector map with all-touched option
        os.system('gdal_rasterize -l {} -at -tr {} {} \
                  -te {} {} {} {} -ot UInt32 -a cat \
                  {} {} -q'.format(patches, start_reg['ewres'],
                                   start_reg['nsres'], start_reg['w'],
                                   start_reg['s'], start_reg['e'],
                                   start_reg['n'], pfile, prast))

        if not ogr.GetDriverByName('GRASS'):
            # Remove vector temp-file
            os.remove(os.path.join(folder, 'patches_vect.gpkg'))

        # Import rasterized patches
        grass.run_command('r.external',
                          flags='o',
                          quiet=True,
                          input=prast,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    else:
        # Simple rasterisation (only area;
        # GRASS >= 7.6 also supports 'centroid')
        version = grass.version()['version'].split('.')
        if (int(version[0]), int(version[1])) >= (7, 6):
            conv_types = ['area', 'centroid']
        else:
            conv_types = ['area']
        grass.run_command('v.to.rast',
                          quiet=True,
                          input=patches,
                          use='cat',
                          type=conv_types,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    # Extract boundaries from patch raster map
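    # A boundary cell is a patch cell with at least one of its four
    # direct neighbours lying outside the patch (NULL or a different
    # category); interior cells are set to NULL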
    grass.run_command('r.mapcalc',
                      expression='{p}_patches_boundary=if(\
    {p}_patches_pol,\
    if((\
    (isnull({p}_patches_pol[-1,0])||| \
    {p}_patches_pol[-1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,1])||| \
    {p}_patches_pol[0,1]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[1,0])||| \
    {p}_patches_pol[1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,-1])||| \
    {p}_patches_pol[0,-1]!={p}_patches_pol)), \
    {p}_patches_pol,null()), null())'.format(p=TMP_PREFIX),
                      quiet=True)

    rasterized_cats = grass.read_command(
        'r.category',
        separator='newline',
        map='{p}_patches_boundary'.format(p=TMP_PREFIX)).replace(
            '\t', '').strip('\n')
    rasterized_cats = list(
        map(int, set([x for x in rasterized_cats.split('\n') if x != ''])))

    # Init output vector maps (the shortest-paths map only if requested)
    network = VectorTopo(edge_map)
    network_columns = [(u'cat', 'INTEGER PRIMARY KEY'), (u'from_p', 'INTEGER'),
                       (u'to_p', 'INTEGER'), (u'min_dist', 'DOUBLE PRECISION'),
                       (u'dist', 'DOUBLE PRECISION'),
                       (u'max_dist', 'DOUBLE PRECISION')]
    network.open('w', tab_name=edge_map, tab_cols=network_columns)

    vertex = VectorTopo(vertex_map)
    vertex_columns = [
        (u'cat', 'INTEGER PRIMARY KEY'),
        (pop_proxy, 'DOUBLE PRECISION'),
    ]
    vertex.open('w', tab_name=vertex_map, tab_cols=vertex_columns)

    if p_flag:
        # Init cost paths file for start-patch
        grass.run_command('v.edit',
                          quiet=True,
                          map=shortest_paths,
                          tool='create')
        grass.run_command('v.db.addtable',
                          quiet=True,
                          map=shortest_paths,
                          columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision")

    start_region_bbox = Bbox(north=float(max_n),
                             south=float(min_s),
                             east=float(max_e),
                             west=float(min_w))
    vpatches = VectorTopo(patches, mapset=patches_mapset)
    vpatches.open('r', layer=int(layer))

    # Loop through patches
    vpatch_ids = np.array(vpatches.features_to_wkb_list(
        feature_type="centroid", bbox=start_region_bbox),
                          dtype=[('vid', 'uint32'), ('cat', 'uint32'),
                                 ('geom', '|S10')])
    cats = set(vpatch_ids['cat'])
    n_cats = len(cats)
    if n_cats < len(vpatch_ids['cat']):
        grass.verbose('At least one MultiPolygon found in patch map.\n \
                      Using average coordinates of the centroids for \
                      visual representation of the patch.')

    for cat in cats:
        if cat not in rasterized_cats:
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using the t-flag or changing \
                          the resolution.'.format(cat))

            continue
        grass.verbose("Calculating connectivity-distances for patch \
                      number {}".format(cat))

        # Filter the centroid(s) belonging to the current patch
        from_vpatch = vpatch_ids[vpatch_ids['cat'] == cat]

        # Get coordinates of the start patch centroid
        if from_vpatch['vid'].size == 1:
            from_centroid = Centroid(v_id=int(from_vpatch['vid']),
                                     c_mapinfo=vpatches.c_mapinfo)
            if not from_centroid:
                continue
            from_x = from_centroid.x
            from_y = from_centroid.y
        else:
            # MultiPolygon: average the coordinates of all its centroids
            xcoords = []
            ycoords = []
            for f_p in from_vpatch['vid']:
                from_centroid = Centroid(v_id=int(f_p),
                                         c_mapinfo=vpatches.c_mapinfo)
                if not from_centroid:
                    continue
                xcoords.append(from_centroid.x)
                ycoords.append(from_centroid.y)
            from_x = np.average(xcoords)
            from_y = np.average(ycoords)

        # Get BoundingBox
        from_bbox = grass.parse_command('v.db.select',
                                        map=patch_map,
                                        flags='r',
                                        where='cat={}'.format(cat))

        attr_filter = vpatches.table.filters.select(pop_proxy)
        attr_filter = attr_filter.where("cat={}".format(cat))
        proxy_val = vpatches.table.execute().fetchone()

        # Prepare start patch
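        # Reclass the boundary raster to the current patch only: the rule
        # "<cat> = 1" keeps the patch, "* = NULL" drops everything else;
        # rules='-' makes r.reclass read the rules from stdin, which is
        # fed through the pipe opened by grass.feed_command()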
        start_patch = '{}_patch_{}'.format(TMP_PREFIX, cat)
        reclass_rule = grass.encode('{} = 1\n* = NULL'.format(cat))
        recl = grass.feed_command(
            'r.reclass',
            quiet=True,
            input='{}_patches_boundary'.format(TMP_PREFIX),
            output=start_patch,
            rules='-')
        recl.stdin.write(reclass_rule)
        recl.stdin.close()
        recl.wait()

        # Check if patch was rasterised (patches smaller than the raster
        # resolution and close to larger patches may not be rasterised)
        #start_check = grass.parse_command('r.info', flags='r', map=start_patch)
        #start_check = grass.parse_command('r.univar', flags='g', map=start_patch)
        #print(start_check)
        """if start_check['min'] != '1':
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.'.format(cat))

            grass.run_command('g.remove', flags='f', vector=start_patch,
                              raster=start_patch, quiet=True)
            grass.del_temp_region()
            continue"""

        # Prepare stop patches
        ############################################
        reg = grass.parse_command('g.region',
                                  flags='ug',
                                  quiet=True,
                                  raster=start_patch,
                                  n=float(from_bbox['n']) + float(cutoff),
                                  s=float(from_bbox['s']) - float(cutoff),
                                  e=float(from_bbox['e']) + float(cutoff),
                                  w=float(from_bbox['w']) - float(cutoff),
                                  align='{}_patches_pol'.format(TMP_PREFIX))

        # Clamp the search region to the start region
        north = min(float(reg['n']), max_n)
        south = max(float(reg['s']), min_s)
        east = min(float(reg['e']), max_e)
        west = max(float(reg['w']), min_w)

        # Set region to patch search radius
        grass.use_temp_region()
        grass.run_command('g.region',
                          quiet=True,
                          n=north,
                          s=south,
                          e=east,
                          w=west,
                          align='{}_patches_pol'.format(TMP_PREFIX))

        # Create buffer around start-patch as a mask
        # for cost distance analysis
        grass.run_command('r.buffer',
                          quiet=True,
                          input=start_patch,
                          output='MASK',
                          distances=cutoff)
        grass.run_command('r.mapcalc',
                          quiet=True,
                          expression='{pf}_patch_{p}_neighbours_contur=\
                                     if({pf}_patches_boundary=={p},\
                                     null(),\
                                     {pf}_patches_boundary)'.format(
                              pf=TMP_PREFIX, p=cat))
        grass.run_command('r.mask', flags='r', quiet=True)

        # Calculate cost distance
        cost_distance_map = '{}_patch_{}_cost_dist'.format(prefix, cat)
        grass.run_command('r.cost',
                          flags=dist_flags,
                          quiet=True,
                          overwrite=True,
                          input=costs,
                          output=cost_distance_map,
                          start_rast=start_patch,
                          memory=memory)

        #grass.run_command('g.region', flags='up')
        # grass.raster.raster_history(cost_distance_map)
        cdhist = History(cost_distance_map)
        cdhist.clear()
        cdhist.creator = os.environ['USER']
        cdhist.write()
        # History object cannot modify description
        grass.run_command('r.support',
                          map=cost_distance_map,
                          description='Generated by r.connectivity.distance',
                          history=os.environ['CMDLINE'])

        # Export distance at boundaries
        maps = '{0}_patch_{1}_neighbours_contur,{2}_patch_{1}_cost_dist'
        maps = maps.format(TMP_PREFIX, cat, prefix)

        connections = grass.encode(
            grass.read_command('r.stats',
                               flags='1ng',
                               quiet=True,
                               input=maps,
                               separator=';').rstrip('\n'))
        if connections:
            con_array = np.genfromtxt(BytesIO(connections),
                                      delimiter=';',
                                      dtype=None,
                                      names=['x', 'y', 'cat', 'dist'])
        else:
            grass.warning('No connections for patch {}'.format(cat))

            # Write centroid to vertex map
            vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)
            vertex.table.conn.commit()

            # Remove temporary map data
            grass.run_command('g.remove',
                              quiet=True,
                              flags='f',
                              type=['raster', 'vector'],
                              pattern="{}*{}*".format(TMP_PREFIX, cat))
            grass.del_temp_region()
            continue

        # Find closest points on neighbour patches
        to_cats = set(np.atleast_1d(con_array['cat']))
        to_coords = []
        for to_cat in to_cats:
            connection = con_array[con_array['cat'] == to_cat]
            connection.sort(order=['dist'])
            pixel = border_dist if len(
                connection) > border_dist else len(connection) - 1
            # closest_points_x = connection['x'][pixel]
            # closest_points_y = connection['y'][pixel]
            closest_points_to_cat = to_cat
            closest_points_min_dist = connection['dist'][0]
            closest_points_dist = connection['dist'][pixel]
            closest_points_max_dist = connection['dist'][-1]
            to_patch_ids = vpatch_ids[vpatch_ids['cat'] == int(to_cat)]['vid']

            if len(to_patch_ids) == 1:
                to_centroid = Centroid(v_id=int(to_patch_ids[0]),
                                       c_mapinfo=vpatches.c_mapinfo)
                to_x = to_centroid.x
                to_y = to_centroid.y
            elif len(to_patch_ids) > 1:
                # MultiPolygon: average the coordinates of all its centroids
                xcoords = []
                ycoords = []
                for t_p in to_patch_ids:
                    to_centroid = Centroid(v_id=int(t_p),
                                           c_mapinfo=vpatches.c_mapinfo)
                    if not to_centroid:
                        continue
                    xcoords.append(to_centroid.x)
                    ycoords.append(to_centroid.y)
                to_x = np.average(xcoords)
                to_y = np.average(ycoords)

            to_coords.append('{},{},{},{},{},{}'.format(
                connection['x'][0], connection['y'][0], to_cat,
                closest_points_min_dist, closest_points_dist,
                closest_points_max_dist))

            # Flag connections with zero distance (directly adjacent patches)
            if closest_points_dist <= 0:
                zero_dist = 1

            # Write data to network
            network.write(Line([(from_x, from_y), (to_x, to_y)]),
                          cat=lin_cat,
                          attrs=(
                              cat,
                              int(closest_points_to_cat),
                              closest_points_min_dist,
                              closest_points_dist,
                              closest_points_max_dist,
                          ))
            network.table.conn.commit()

            lin_cat = lin_cat + 1

        # Save closest points and shortest paths through cost raster as
        # vector map (r.drain limited to 1024 points) if requested
        if p_flag:
            grass.verbose('Extracting shortest paths for patch number \
                          {}...'.format(cat))

            points_n = len(to_cats)

            tiles = int(points_n / 1024.0)
            rest = points_n % 1024
            if rest != 0:
                tiles = tiles + 1

            tile_n = 0
            while tile_n < tiles:
                tile_n = tile_n + 1
                # Import closest points for start-patch in blocks of 1024
                sp = grass.feed_command('v.in.ascii',
                                        flags='nr',
                                        overwrite=True,
                                        quiet=True,
                                        input='-',
                                        stderr=subprocess.PIPE,
                                        output="{}_{}_cp".format(
                                            TMP_PREFIX, cat),
                                        separator=",",
                                        columns="x double precision,\
                                           y double precision,\
                                           to_p integer,\
                                           dist_min double precision,\
                                           dist double precision,\
                                           dist_max double precision")
                # Feed only the current block of points to v.in.ascii
                chunk = to_coords[(tile_n - 1) * 1024:tile_n * 1024]
                sp.stdin.write(grass.encode("\n".join(chunk)))
                sp.stdin.close()
                sp.wait()

                # Extract shortest paths for start-patch in chunks of
                # 1024 points
                cost_paths = "{}_{}_cost_paths".format(TMP_PREFIX, cat)
                start_points = "{}_{}_cp".format(TMP_PREFIX, cat)
                grass.run_command('r.drain',
                                  overwrite=True,
                                  quiet=True,
                                  input=cost_distance_map,
                                  output=cost_paths,
                                  drain=cost_paths,
                                  start_points=start_points)

                grass.run_command('v.db.addtable',
                                  map=cost_paths,
                                  quiet=True,
                                  columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision")
                grass.run_command('v.db.update',
                                  map=cost_paths,
                                  column='from_p',
                                  value=cat,
                                  quiet=True)
                grass.run_command('v.distance',
                                  quiet=True,
                                  from_=cost_paths,
                                  to=start_points,
                                  upload='to_attr',
                                  column='to_p',
                                  to_column='to_p')
                grass.run_command('v.db.join',
                                  quiet=True,
                                  map=cost_paths,
                                  column='to_p',
                                  other_column='to_p',
                                  other_table=start_points,
                                  subset_columns='dist_min,dist,dist_max')

                #grass.run_command('v.info', flags='c',
                #                  map=cost_paths)
                grass.run_command('v.patch',
                                  flags='ae',
                                  overwrite=True,
                                  quiet=True,
                                  input=cost_paths,
                                  output=shortest_paths)

                # Remove temporary map data
                grass.run_command('g.remove',
                                  quiet=True,
                                  flags='f',
                                  type=['raster', 'vector'],
                                  pattern="{}*{}*".format(TMP_PREFIX, cat))

        # Remove temporary map data for patch
        if r_flag:
            grass.run_command('g.remove',
                              flags='f',
                              type='raster',
                              name=cost_distance_map,
                              quiet=True)

        vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)

        vertex.table.conn.commit()

        # Print progress message
        grass.percent(i=int((float(counter) / n_cats) * 100), n=100, s=3)

        # Update counter for progress message
        counter = counter + 1

    if zero_dist:
        grass.warning('Some patches are directly adjacent to others. \
                       Minimum distance set to 0.0000000001')

    # Close vector maps and build topology
    network.close()
    vertex.close()

    # Add vertex attributes
    # grass.run_command('v.db.addtable', map=vertex_map)
    # grass.run_command('v.db.join', map=vertex_map, column='cat',
    #                   other_table=in_db_connection[int(layer)]['table'],
    #                   other_column='cat', subset_columns=pop_proxy,
    #                   quiet=True)

    # Add history and meta data to produced maps
    grass.run_command('v.support',
                      flags='h',
                      map=edge_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    grass.run_command('v.support',
                      flags='h',
                      map=vertex_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    if p_flag:
        grass.run_command('v.support',
                          flags='h',
                          map=shortest_paths,
                          person=os.environ['USER'],
                          cmdhist=os.environ['CMDLINE'])

    # Output also Conefor files if requested
    if conefor_dir:
        query = """SELECT p_from, p_to, avg(dist) FROM
                 (SELECT
                 CASE
                 WHEN from_p > to_p THEN to_p
                 ELSE from_p END AS p_from,
                    CASE
                 WHEN from_p > to_p THEN from_p
                 ELSE to_p END AS p_to,
                 dist
                 FROM {}) AS x
                 GROUP BY p_from, p_to""".format(edge_map)
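        # Each connection is written in both directions; ordering each pair
        # so that the smaller cat becomes p_from and averaging dist yields
        # one undirected record per patch pair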
        with open(os.path.join(conefor_dir, 'undirected_connection_file'),
                  'w') as edges:
            edges.write(
                grass.read_command('db.select', sql=query, separator=' '))
        with open(os.path.join(conefor_dir, 'directed_connection_file'),
                  'w') as edges:
            edges.write(
                grass.read_command('v.db.select',
                                   map=edge_map,
                                   separator=' ',
                                   flags='c'))
        with open(os.path.join(conefor_dir, 'node_file'), 'w') as nodes:
            nodes.write(
                grass.read_command('v.db.select',
                                   map=vertex_map,
                                   separator=' ',
                                   flags='c'))
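
The shortest-path extraction above works around r.drain's limit of 1024 start points by processing the closest points in blocks. The slicing can be isolated into a tiny generator; this is an illustrative sketch (the helper name is hypothetical, not part of the module):

def chunked(items, size=1024):
    # Yield consecutive blocks of at most `size` items, e.g. for feeding
    # v.in.ascii / r.drain with 1024 start points at a time
    for start in range(0, len(items), size):
        yield items[start:start + size]

# usage sketch:
# for block in chunked(to_coords):
#     feed "\n".join(block) to v.in.ascii, then run r.drain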
Exemplo n.º 26
def main():

    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    global tile, tmpdir, in_temp, currdir, tmpregionname

    in_temp = False

    url = options['url']
    username = options['username']
    password = options['password']
    local = options['local']
    output = options['output']
    memory = int(options['memory'])
    fillnulls = flags['n']
    srtmv3 = not flags['2']
    one = flags['1']
    dozerotile = flags['z']
    reproj_res = options['resolution']

    overwrite = grass.overwrite()

    res = '00:00:03'
    if srtmv3:
        fillnulls = 0
        if one:
            res = '00:00:01'
    else:
        one = None

    if len(local) == 0:
        if len(url) == 0:
            if srtmv3:
                if one:
                    url = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/'
                else:
                    url = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11/'
            else:
                url = 'http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/'

    if len(local) == 0:
        local = None

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)

    if fillnulls == 1 and memory <= 0:
        grass.warning(_("Amount of memory to use for interpolation must be positive, setting to 300 MB"))
        memory = 300

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    currdir = os.getcwd()
    pid = os.getpid()

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True
    if local is None:
        local = tmpdir

    # save region
    tmpregionname = 'r_in_srtm_tmp_region'
    grass.run_command('g.region', save=tmpregionname, overwrite=overwrite)

    # get extents
    if kv['+proj'] == 'longlat':
        reg = grass.region()
    else:
        if not options['resolution']:
            grass.fatal(_("The <resolution> must be set if the projection is not 'longlat'."))
        reg2 = grass.parse_command('g.region', flags='uplg')
        north = [float(reg2['ne_lat']), float(reg2['nw_lat'])]
        south = [float(reg2['se_lat']), float(reg2['sw_lat'])]
        east = [float(reg2['ne_long']), float(reg2['se_long'])]
        west = [float(reg2['nw_long']), float(reg2['sw_long'])]
        reg = {}
        if np.mean(north) > np.mean(south):
            reg['n'] = max(north)
            reg['s'] = min(south)
        else:
            reg['n'] = min(north)
            reg['s'] = max(south)
        if np.mean(west) > np.mean(east):
            reg['w'] = max(west)
            reg['e'] = min(east)
        else:
            reg['w'] = min(west)
            reg['e'] = max(east)
        # get actual location, mapset, ...
        grassenv = grass.gisenv()
        tgtloc = grassenv['LOCATION_NAME']
        tgtmapset = grassenv['MAPSET']
        GISDBASE = grassenv['GISDBASE']
        TGTGISRC = os.environ['GISRC']

    if kv['+proj'] != 'longlat':
        SRCGISRC, TMPLOC = createTMPlocation()
    if options['region'] is None or options['region'] == '':
        north = reg['n']
        south = reg['s']
        east = reg['e']
        west = reg['w']
    else:
        west, south, east, north = options['region'].split(',')
        west = float(west)
        south = float(south)
        east = float(east)
        north = float(north)

    # adjust extents to cover SRTM tiles: 1 degree bounds
    tmpint = int(north)
    if tmpint < north:
        north = tmpint + 1
    else:
        north = tmpint

    tmpint = int(south)
    if tmpint > south:
        south = tmpint - 1
    else:
        south = tmpint

    tmpint = int(east)
    if tmpint < east:
        east = tmpint + 1
    else:
        east = tmpint

    tmpint = int(west)
    if tmpint > west:
        west = tmpint - 1
    else:
        west = tmpint

    if north == south:
        north += 1
    if east == west:
        east += 1

    rows = abs(north - south)
    cols = abs(east - west)
    ntiles = rows * cols
    grass.message(_("Importing %d SRTM tiles...") % ntiles, flag = 'i')
    counter = 1

    srtmtiles = ''
    valid_tiles = 0
    for ndeg in range(south, north):
        for edeg in range(west, east):
            grass.percent(counter, ntiles, 1)
            counter += 1
            if ndeg < 0:
                tile = 'S'
            else:
                tile = 'N'
            tile = tile + '%02d' % abs(ndeg)
            if edeg < 0:
                tile = tile + 'W'
            else:
                tile = tile + 'E'
            tile = tile + '%03d' % abs(edeg)
            grass.debug("Tile: %s" % tile, debug = 1)

            if local != tmpdir:
                gotit = import_local_tile(tile, local, pid, srtmv3, one)
            else:
                gotit = download_tile(tile, url, pid, srtmv3, one, username, password)
                if gotit == 1:
                    gotit = import_local_tile(tile, tmpdir, pid, srtmv3, one)
            if gotit == 1:
                grass.verbose(_("Tile %s successfully imported") % tile)
                valid_tiles += 1
            elif dozerotile:
                # create tile with zeros
                if one:
                    # north
                    if ndeg < -1:
                        tmpn = '%02d:59:59.5S' % (abs(ndeg) - 2)
                    else:
                        tmpn = '%02d:00:00.5N' % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = '%02d:00:00.5S' % abs(ndeg)
                    else:
                        tmps = '%02d:59:59.5N' % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = '%03d:59:59.5W' % (abs(edeg) - 2)
                    else:
                        tmpe = '%03d:00:00.5E' % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = '%03d:00:00.5W' % abs(edeg)
                    else:
                        tmpw = '%03d:59:59.5E' % (edeg - 1)
                else:
                    # north
                    if ndeg < -1:
                        tmpn = '%02d:59:58.5S' % (abs(ndeg) - 2)
                    else:
                        tmpn = '%02d:00:01.5N' % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = '%02d:00:01.5S' % abs(ndeg)
                    else:
                        tmps = '%02d:59:58.5N' % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = '%03d:59:58.5W' % (abs(edeg) - 2)
                    else:
                        tmpe = '%03d:00:01.5E' % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = '%03d:00:01.5W' % abs(edeg)
                    else:
                        tmpw = '%03d:59:58.5E' % (edeg - 1)

                grass.run_command('g.region', n = tmpn, s = tmps, e = tmpe, w = tmpw, res = res)
                grass.run_command('r.mapcalc', expression = "%s = 0" % (tile + '.r.in.srtm.tmp.' + str(pid)), quiet = True)
                grass.run_command('g.region', region = tmpregionname)


    # g.list with sep = comma does not work ???
    pattern = '*.r.in.srtm.tmp.%d' % pid
    srtmtiles = grass.read_command('g.list', type = 'raster',
                                   pattern = pattern,
                                   sep = 'newline',
                                   quiet = True)

    srtmtiles = srtmtiles.splitlines()
    srtmtiles = ','.join(srtmtiles)
    grass.debug("'List of Tiles: %s" % srtmtiles, debug = 1)

    if valid_tiles == 0:
        grass.run_command('g.remove', type = 'raster', name = str(srtmtiles), flags = 'f', quiet = True)
        grass.warning(_("No tiles imported"))
        if local != tmpdir:
            grass.fatal(_("Please check if local folder <%s> is correct.") % local)
        else:
            grass.fatal(_("Please check internet connection, credentials, and if url <%s> is correct.") % url)

    grass.run_command('g.region', raster = str(srtmtiles))

    grass.message(_("Patching tiles..."))
    if fillnulls == 0:
        if valid_tiles > 1:
            if kv['+proj'] != 'longlat':
                grass.run_command('r.buildvrt', input = srtmtiles, output = output)
            else:
                grass.run_command('r.patch', input = srtmtiles, output = output)
        else:
            grass.run_command('g.rename', raster = '%s,%s' % (srtmtiles, output), quiet = True)
    else:
        ncells = grass.region()['cells']
        if int(ncells) > 1000000000:
            grass.message(_("%s cells to interpolate, this will take some time") % str(ncells), flag = 'i')
        if kv['+proj'] != 'longlat':
            grass.run_command('r.buildvrt', input = srtmtiles, output = output + '.holes')
        else:
            grass.run_command('r.patch', input = srtmtiles, output = output + '.holes')
        mapstats = grass.parse_command('r.univar', map = output + '.holes', flags = 'g', quiet = True)
        if mapstats['null_cells'] == '0':
            grass.run_command('g.rename', raster = '%s,%s' % (output + '.holes', output), quiet = True)
        else:
            grass.run_command('r.resamp.bspline',
                              input = output + '.holes',
                              output = output + '.interp',
                              se = '0.0025', sn = '0.0025',
                              method = 'linear',
                              memory = memory,
                              flags = 'n')
            grass.run_command('r.patch',
                              input = '%s,%s' % (output + '.holes',
                              output + '.interp'),
                              output = output + '.float',
                              flags = 'z')
            grass.run_command('r.mapcalc', expression = '%s = round(%s)' % (output, output + '.float'))
            grass.run_command('g.remove', type = 'raster',
                              name = '%s,%s,%s' % (output + '.holes', output + '.interp', output + '.float'),
                              flags = 'f',
                              quiet = True)


    # switch to target location
    if kv['+proj'] != 'longlat':
        os.environ['GISRC'] = str(TGTGISRC)
        # r.proj
        grass.message(_("Reprojecting <%s>...") % output)
        kwargs = {
            'location': TMPLOC,
            'mapset': 'PERMANENT',
            'input': output,
            'memory': memory,
            'resolution': reproj_res
        }
        if options['method']:
            kwargs['method'] = options['method']
        try:
            grass.run_command('r.proj', **kwargs)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % output)
    else:
        if fillnulls != 0:
            grass.run_command('g.remove', type = 'raster', pattern = pattern, flags = 'f', quiet = True)

    # nice color table
    grass.run_command('r.colors', map = output, color = 'srtm', quiet = True)

    # write metadata:
    tmphist = grass.tempfile()
    f = open(tmphist, 'w+')
    f.write(os.environ['CMDLINE'])
    f.close()
    if srtmv3:
        source1 = 'SRTM V3'
    else:
        source1 = 'SRTM V2.1'
    grass.run_command('r.support', map = output,
                      loadhistory = tmphist,
                      description = 'generated by r.in.srtm.region',
                      source1 = source1,
                      source2 = (local if local != tmpdir else url))
    grass.try_remove(tmphist)

    grass.message(_("Done: generated map <%s>") % output)
Exemplo n.º 27
def main():
    options, flags = gs.parser()

    points_name = options["points"]
    points_layer = options["points_layer"]
    points_columns = []
    if options["points_columns"]:
        points_columns = options["points_columns"].split(",")

    # TODO: Add check points exist before we query the metadata.

    input_vector_columns = gs.vector_columns(points_name, points_layer)

    if not points_columns:
        # Get all numeric columns from the table.
        # Get only the names ordered in the same as in the database.
        all_column_names = gs.vector_columns(points_name,
                                             points_layer,
                                             getDict=False)
        for name in all_column_names:
            # TODO: Find out what is the key column (usually cat) and skip it.
            column_type = input_vector_columns[name]["type"]
            if sql_type_is_numeric(column_type):
                points_columns.append(name)
    else:
        # Check the user provided columns.
        for name in points_columns:
            try:
                column_info = input_vector_columns[name]
            except KeyError:
                gs.fatal(
                    _("Column <{name}> not found in vector map <{points_name}>,"
                      " layer <{points_layer}>").format(**locals()))
            column_type = column_info["type"]
            if not sql_type_is_numeric(column_type):
                gs.fatal(
                    _("Column <{name}> in <{points_name}> is {column_type}"
                      " which is not a numeric type").format(**locals()))

    methods = options["method"].split(",")
    stats_columns_names = []
    num_new_columns = len(points_columns) * len(methods)
    if options["stats_columns"]:
        stats_columns_names = options["stats_columns"].split(",")
        names_provided = len(stats_columns_names)
        if names_provided != num_new_columns:
            gs.fatal(
                _("Number of provided stats_columns ({names_provided})"
                  " does not correspond to number of names needed"
                  " for every combination of points_columns and methods"
                  " ({num_points_columns} * {num_methods} = {names_needed})").
                format(
                    names_provided=names_provided,
                    num_points_columns=len(points_columns),
                    num_methods=len(methods),
                    names_needed=num_new_columns,
                ))
        num_unique_names = len(set(stats_columns_names))
        if names_provided != num_unique_names:
            gs.fatal(
                _("Names in stats_columns are not unique"
                  " ({names_provided} items provied"
                  " but only {num_unique_names} are unique)").format(
                      names_provided=names_provided,
                      num_unique_names=num_unique_names,
                  ))

    modified_options = options.copy()
    # Remove options we are handling here.
    del modified_options["points_columns"]
    del modified_options["stats_columns"]
    del modified_options["method"]

    # Note that the count_column is mandatory for v.vect.stats,
    # so it is simply computed more than once. Ideally, it would
    # be optional for this module, which would probably mean better
    # handling of the number of points and the n statistic in
    # v.vect.stats (unless we simply use a temporary name and drop
    # the column at the end).

    # The product function advances the rightmost element on every iteration,
    # so first we get all methods for one column. This is important to
    # the user if stats_columns is provided.
    for i, (points_column,
            method) in enumerate(product(points_columns, methods)):
        if stats_columns_names:
            stats_column_name = stats_columns_names[i]
        else:
            stats_column_name = f"{points_column}_{method}"
        gs.run_command(
            "v.vect.stats",
            method=method,
            points_column=points_column,
            stats_column=stats_column_name,
            quiet=True,
            **modified_options,
            errors="exit",
        )
        gs.percent(i, num_new_columns, 1)
    gs.percent(1, 1, 1)

    return 0
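
sql_type_is_numeric() is used above but not shown in this excerpt; a plausible minimal implementation (an assumption, based on the numeric column types GRASS attribute tables report) could be:

def sql_type_is_numeric(column_type):
    # Treat the SQL types GRASS reports for numeric columns as numeric;
    # CHARACTER, TEXT, DATE etc. fall through as non-numeric
    return column_type.upper() in ('INTEGER', 'REAL', 'DOUBLE PRECISION')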
Exemplo n.º 28
def main():
    red = options['red']
    green = options['green']
    blue = options['blue']
    brightness = options['strength']
    full = flags['f']
    preserve = flags['p']
    reset = flags['r']
    
    global do_mp

    if flags['s']:
        do_mp = False

    # 90 or 98? MAX value controls brightness
    # think of percent (0-100), must be positive or 0
    # must be more than "2" ?

    if full:
        for i in [red, green, blue]:
            grass.run_command('r.colors', map=i, color='grey', quiet=True)
        sys.exit(0)

    if reset:
        for i in [red, green, blue]:
            grass.run_command('r.colors', map=i, color='grey255', quiet=True)
        sys.exit(0)

    if not preserve:
        if do_mp:
            grass.message(_("Processing..."))
            # set up jobs and launch them
            proc = {}
            conn = {}
            for i in [red, green, blue]:
                conn[i] = mp.Pipe()
                proc[i] = mp.Process(target=get_percentile_mp,
                                     args=(i, ['2', brightness],
                                           conn[i],))
                proc[i].start()
            grass.percent(1, 2, 1)

            # collect results and wait for jobs to finish
            for i in [red, green, blue]:
                output_pipe, input_pipe = conn[i]
                (v0, v1) = input_pipe.recv()
                grass.debug('parent (%s) (%.1f, %.1f)' % (i, v0, v1))
                input_pipe.close()
                proc[i].join()
                set_colors(i, v0, v1)
            grass.percent(1, 1, 1)
        else:
            for i in [red, green, blue]:
                grass.message(_("Processing..."))
                (v0, v1) = get_percentile(i, ['2', brightness])
                grass.debug("<%s>:  min=%f   max=%f" % (i, v0, v1))
                set_colors(i, v0, v1)
    else:
        all_max = 0
        all_min = 999999

        if do_mp:
            grass.message(_("Processing..."))
            # set up jobs and launch jobs
            proc = {}
            conn = {}
            for i in [red, green, blue]:
                conn[i] = mp.Pipe()
                proc[i] = mp.Process(target=get_percentile_mp,
                                     args=(i, ['2', brightness],
                                           conn[i],))
                proc[i].start()
            grass.percent(1, 2, 1)

            # collect results and wait for jobs to finish
            for i in [red, green, blue]:
                output_pipe, input_pipe = conn[i]
                (v0, v1) = input_pipe.recv()
                grass.debug('parent (%s) (%.1f, %.1f)' % (i, v0, v1))
                input_pipe.close()
                proc[i].join()
                all_min = min(all_min, v0)
                all_max = max(all_max, v1)
            grass.percent(1, 1, 1)
        else:
            for i in [red, green, blue]:
                grass.message(_("Processing..."))
                (v0, v1) = get_percentile(i, ['2', brightness])
                grass.debug("<%s>:  min=%f   max=%f" % (i, v0, v1))
                all_min = min(all_min, v0)
                all_max = max(all_max, v1)

        grass.debug("all_min=%f   all_max=%f" % (all_min, all_max))
        for i in [red, green, blue]:
            set_colors(i, all_min, all_max)

    # write cmd history:
    mapset = grass.gisenv()['MAPSET']
    for i in [red, green, blue]:
        if grass.find_file(i)['mapset'] == mapset:
            grass.raster_history(i)
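
get_percentile() and set_colors() are defined elsewhere in the module; a rough sketch of how get_percentile() could be built on r.quantile (an assumption, not necessarily the module's actual helper):

def get_percentile(map_name, percentiles):
    # r.quantile prints one "row:percentile:value" line per requested
    # percentile; return the two values in the requested order
    out = grass.read_command('r.quantile', input=map_name,
                             percentiles=percentiles, quiet=True)
    vals = [float(line.split(':')[2]) for line in out.splitlines()]
    return (vals[0], vals[1])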
Exemplo n.º 29
def main():

    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    global tile, tmpdir, in_temp, currdir, tmpregionname

    in_temp = False

    nasadem_version = options["version"]
    nasadem_layer = options["layer"]
    url = options["url"]
    username = options["username"]
    password = options["password"]
    local = options["local"]
    output = options["output"]
    dozerotile = flags["z"]
    reproj_res = options["resolution"]

    overwrite = grass.overwrite()

    tile = None
    tmpdir = None
    in_temp = None
    currdir = None
    tmpregionname = None

    if len(local) == 0:
        local = None
        if len(username) == 0 or len(password) == 0:
            grass.fatal(_("NASADEM download requires username and password."))

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags="j")
    kv = grass.parse_key_val(s)

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    currdir = os.getcwd()
    pid = os.getpid()

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True

    # save region
    tmpregionname = "r_in_nasadem_region_" + str(pid)
    grass.run_command("g.region", save=tmpregionname, overwrite=overwrite)

    # get extents
    if kv["+proj"] == "longlat":
        reg = grass.region()
        if options["region"] is None or options["region"] == "":
            north = reg["n"]
            south = reg["s"]
            east = reg["e"]
            west = reg["w"]
        else:
            west, south, east, north = options["region"].split(",")
            west = float(west)
            south = float(south)
            east = float(east)
            north = float(north)

    else:
        if not options["resolution"]:
            grass.fatal(
                _("The <resolution> must be set if the projection is not 'longlat'.")
            )
        if options["region"] is None or options["region"] == "":
            reg2 = grass.parse_command("g.region", flags="uplg")
            north_vals = [float(reg2["ne_lat"]), float(reg2["nw_lat"])]
            south_vals = [float(reg2["se_lat"]), float(reg2["sw_lat"])]
            east_vals = [float(reg2["ne_long"]), float(reg2["se_long"])]
            west_vals = [float(reg2["nw_long"]), float(reg2["sw_long"])]
            reg = {}
            if np.mean(north_vals) > np.mean(south_vals):
                north = max(north_vals)
                south = min(south_vals)
            else:
                north = min(north_vals)
                south = max(south_vals)
            if np.mean(west_vals) > np.mean(east_vals):
                west = max(west_vals)
                east = min(east_vals)
            else:
                west = min(west_vals)
                east = max(east_vals)
            # get actual location, mapset, ...
            grassenv = grass.gisenv()
            tgtloc = grassenv["LOCATION_NAME"]
            tgtmapset = grassenv["MAPSET"]
            GISDBASE = grassenv["GISDBASE"]
            TGTGISRC = os.environ["GISRC"]
        else:
            grass.fatal(
                _(
                    "The option <resolution> is only supported in the projection 'longlat'"
                )
            )

    # adjust extents to cover SRTM tiles: 1 degree bounds
    tmpint = int(north)
    if tmpint < north:
        north = tmpint + 1
    else:
        north = tmpint

    tmpint = int(south)
    if tmpint > south:
        south = tmpint - 1
    else:
        south = tmpint

    tmpint = int(east)
    if tmpint < east:
        east = tmpint + 1
    else:
        east = tmpint

    tmpint = int(west)
    if tmpint > west:
        west = tmpint - 1
    else:
        west = tmpint

    if north == south:
        north += 1
    if east == west:
        east += 1

    # switch to longlat location
    if kv["+proj"] != "longlat":
        SRCGISRC, TMPLOC = createTMPlocation()

    rows = abs(north - south)
    cols = abs(east - west)
    ntiles = rows * cols
    grass.message(_("Importing %d NASADEM tiles...") % ntiles, flag="i")
    counter = 1

    srtmtiles = ""
    valid_tiles = 0
    for ndeg in range(south, north):
        for edeg in range(west, east):
            grass.percent(counter, ntiles, 1)
            counter += 1
            if ndeg < 0:
                tile = "s"
            else:
                tile = "n"
            tile = tile + "%02d" % abs(ndeg)
            if edeg < 0:
                tile = tile + "w"
            else:
                tile = tile + "e"
            tile = tile + "%03d" % abs(edeg)
            grass.debug("Tile: %s" % tile, debug=1)

            if local is None:
                download_tile(tile, url, pid, nasadem_version, username, password)
                gotit = import_local_tile(tile, tmpdir, pid, nasadem_layer)
            else:
                gotit = import_local_tile(tile, local, pid, nasadem_layer)
            if gotit == 1:
                grass.verbose(_("Tile %s successfully imported") % tile)
                valid_tiles += 1
            elif dozerotile:
                # create tile with zeros
                # north
                if ndeg < -1:
                    tmpn = "%02d:59:59.5S" % (abs(ndeg) - 2)
                else:
                    tmpn = "%02d:00:00.5N" % (ndeg + 1)
                # south
                if ndeg < 1:
                    tmps = "%02d:00:00.5S" % abs(ndeg)
                else:
                    tmps = "%02d:59:59.5N" % (ndeg - 1)
                # east
                if edeg < -1:
                    tmpe = "%03d:59:59.5W" % (abs(edeg) - 2)
                else:
                    tmpe = "%03d:00:00.5E" % (edeg + 1)
                # west
                if edeg < 1:
                    tmpw = "%03d:00:00.5W" % abs(edeg)
                else:
                    tmpw = "%03d:59:59.5E" % (edeg - 1)

                grass.run_command("g.region", n=tmpn, s=tmps, e=tmpe, w=tmpw, res=res)
                grass.run_command(
                    "r.mapcalc",
                    expression="%s = 0" % (tile + ".r.in.nasadem.tmp." + str(pid)),
                    quiet=True,
                )
                grass.run_command("g.region", region=tmpregionname)

    # g.list with sep = comma does not work ???
    pattern = "*.r.in.nasadem.tmp.%d" % pid
    demtiles = grass.read_command(
        "g.list", type="raster", pattern=pattern, sep="newline", quiet=True
    )

    demtiles = demtiles.splitlines()
    demtiles = ",".join(demtiles)
    grass.debug("'List of Tiles: %s" % demtiles, debug=1)

    if valid_tiles == 0:
        grass.run_command(
            "g.remove", type="raster", name=str(demtiles), flags="f", quiet=True
        )
        grass.warning(_("No tiles imported"))
        if local is not None:
            grass.fatal(_("Please check if local folder <%s> is correct.") % local)
        else:
            grass.fatal(
                _(
                    "Please check internet connection, credentials, and if url <%s> is correct."
                )
                % url
            )

    grass.run_command("g.region", raster=str(demtiles))

    if valid_tiles > 1:
        grass.message(_("Patching tiles..."))
        if kv["+proj"] != "longlat":
            grass.run_command("r.buildvrt", input=demtiles, output=output)
        else:
            grass.run_command("r.patch", input=demtiles, output=output)
            grass.run_command(
                "g.remove", type="raster", name=str(demtiles), flags="f", quiet=True
            )
    else:
        grass.run_command("g.rename", raster="%s,%s" % (demtiles, output), quiet=True)

    # switch to target location and reproject nasadem
    if kv["+proj"] != "longlat":
        os.environ["GISRC"] = str(TGTGISRC)
        # r.proj
        grass.message(_("Reprojecting <%s>...") % output)
        kwargs = {
            "location": TMPLOC,
            "mapset": "PERMANENT",
            "input": output,
            "resolution": reproj_res,
        }
        if options["memory"]:
            kwargs["memory"] = options["memory"]
        if options["method"]:
            kwargs["method"] = options["method"]
        try:
            grass.run_command("r.proj", **kwargs)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % output)

    # nice color table
    grass.run_command("r.colors", map=output, color="srtm", quiet=True)

    # write metadata:
    tmphist = grass.tempfile()
    f = open(tmphist, "w+")
    # hide username and password
    cmdline = os.environ["CMDLINE"]
    if username is not None and len(username) > 0:
        cmdline = cmdline.replace("=" + username, "=xxx")
    if password is not None and len(password) > 0:
        cmdline = cmdline.replace("=" + password, "=xxx")

    f.write(cmdline)
    f.close()
    source1 = nasadem_version
    grass.run_command(
        "r.support",
        map=output,
        loadhistory=tmphist,
        description="generated by r.in.nasadem",
        source1=source1,
        source2=(local if local else url),
    )
    grass.try_remove(tmphist)

    grass.message(_("Done: generated map <%s>") % output)
Exemplo n.º 30
def main(options, flags):

    import model as modellib
    from config import ModelConfig

    try:
        imagesDir = options['images_directory']
        modelPath = options['model']
        classes = options['classes'].split(',')
        if options['band1']:
            band1 = options['band1'].split(',')
            band2 = options['band2'].split(',')
            band3 = options['band3'].split(',')
        else:
            band1 = list()
            band2 = list()
            band3 = list()
        outputType = options['output_type']
        if options['images_format']:
            extension = options['images_format']
            if options['images_format'][0] != '.':
                extension = '.{}'.format(extension)
        else:
            extension = ''

        # a directory where masks and georeferencing will be saved in case of
        # external images
        masksDir = gscript.core.tempfile().rsplit(os.sep, 1)[0]
    except KeyError:
        # GRASS parses keys and values as bytes instead of strings
        imagesDir = options[b'images_directory'].decode('utf-8')
        modelPath = options[b'model'].decode('utf-8')
        classes = options[b'classes'].decode('utf-8').split(',')
        if options[b'band1'].decode('utf-8'):
            band1 = options[b'band1'].decode('utf-8').split(',')
            band2 = options[b'band2'].decode('utf-8').split(',')
            band3 = options[b'band3'].decode('utf-8').split(',')
        else:
            band1 = list()
            band2 = list()
            band3 = list()
        outputType = options[b'output_type'].decode('utf-8')
        if options[b'images_format'].decode('utf-8'):
            extension = options[b'images_format'].decode('utf-8')
            if extension[0] != '.':
                extension = '.{}'.format(extension)
        else:
            extension = ''

        newFlags = dict()
        for flag, value in flags.items():
            newFlags.update({flag.decode('utf-8'): value})
        flags = newFlags

        # a directory where masks and georeferencing will be saved in case of
        # external images
        masksDir = gscript.core.tempfile().decode('utf-8').rsplit(os.sep, 1)[0]

    if len(band1) != len(band2) or len(band2) != len(band3):
        gscript.fatal('Length of band1, band2 and band3 must be equal.')

    # TODO: (3 different bands in case of a lot of classes?)
    if len(classes) > 255:
        gscript.fatal('Too many classes. Must be less than 256.')

    if len(set(classes)) != len(classes):
        gscript.fatal('Two or more classes have the same name.')

    # used colour corresponds to class_id
    classesColours = range(len(classes) + 1)

    # Create model object in inference mode.
    config = ModelConfig(numClasses=len(classes) + 1)
    model = modellib.MaskRCNN(mode="inference",
                              model_dir=modelPath,
                              config=config)

    model.load_weights(modelPath, by_name=True)

    masks = list()
    detectedClasses = list()

    # TODO: Use the whole list instead of iteration
    if len(band1) > 0:
        gscript.message('Detecting features in raster maps...')
        # using maps imported in GRASS
        mapsCount = len(band1)
        for i in range(mapsCount):
            gscript.percent(i + 1, mapsCount, 1)
            maskTitle = '{}_{}'.format(band1[i].split('.')[0], i)
            # load map into 3-band np.array
            gscript.run_command('g.region', raster=band1[i], quiet=True)
            bands = np.stack((garray.array(band1[i]), garray.array(
                band2[i]), garray.array(band3[i])), 2)

            # Run detection
            results = model.detect([bands], verbosity=gscript.verbosity())

            # Save results
            for r in results:
                parse_instances(
                    bands,
                    r['rois'],
                    r['masks'],
                    r['class_ids'],
                    # ['BG'] + [i for i in classes],
                    # r['scores'],
                    outputDir=masksDir,
                    which=outputType,
                    title=maskTitle,
                    colours=classesColours,
                    mList=masks,
                    cList=detectedClasses,
                    grassMap=True)

    if imagesDir:
        gscript.message('Detecting features in images from the directory...')
        for imageFile in [
                file for file in next(os.walk(imagesDir))[2]
                if os.path.splitext(file)[1] == extension
        ]:
            image = skimage.io.imread(os.path.join(imagesDir, imageFile))
            if image.shape[2] > 3:
                image = image[:, :, 0:3]
            source = gdal.Open(os.path.join(imagesDir, imageFile))

            sourceProj = source.GetProjection()
            sourceTrans = source.GetGeoTransform()

            # Run detection
            results = model.detect([image], verbosity=gscript.verbosity())

            # Save results
            for r in results:
                parse_instances(
                    image,
                    r['rois'],
                    r['masks'],
                    r['class_ids'],
                    # ['BG'] + [i for i in classes],
                    # r['scores'],
                    outputDir=masksDir,
                    which=outputType,
                    title=imageFile,
                    colours=classesColours,
                    proj=sourceProj,
                    trans=sourceTrans,
                    mList=masks,
                    cList=detectedClasses,
                    externalReferencing=flags['e'])

    if flags['e']:
        gscript.message('Masks detected. Georeferencing masks...')
        external_georeferencing(imagesDir, classes, masksDir, masks,
                                detectedClasses, extension)

    gscript.message('Converting masks to vectors...')
    masksString = ','.join(masks)
    for parsedClass in detectedClasses:
        gscript.message('Processing {} map...'.format(classes[parsedClass -
                                                              1]))
        i = 0
        for maskName in masks:
            gscript.percent(i, len(masks), 1)
            gscript.run_command('g.region', raster=maskName, quiet=True)
            gscript.run_command('r.mask',
                                raster=maskName,
                                maskcats=classesColours[parsedClass],
                                quiet=True)
            gscript.run_command('r.to.vect',
                                flags='s',
                                input=maskName,
                                output=maskName,
                                type=outputType,
                                quiet=True)
            gscript.run_command('r.mask', flags='r', quiet=True)
            i += 1

        gscript.run_command('v.patch',
                            input=masksString,
                            output=classes[parsedClass - 1])
        gscript.run_command('g.remove',
                            flags='f',
                            name=masksString,
                            type='vector',
                            quiet=True)
    gscript.run_command('g.remove',
                        flags='f',
                        name=masksString,
                        type='raster',
                        quiet=True)
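
# The bytes-vs-str handling of options/flags above could be factored into a
# small helper. A minimal sketch; the helper name decode_options is ours and
# not part of the original example:
def decode_options(options, flags, encoding='utf-8'):
    """Return copies of options/flags with bytes keys and values decoded."""
    def decode(value):
        return value.decode(encoding) if isinstance(value, bytes) else value
    new_options = {decode(key): decode(value)
                   for key, value in options.items()}
    new_flags = {decode(key): value for key, value in flags.items()}
    return new_options, new_flags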
Exemplo n.º 31
def main(options, flags):

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    nprocs = int(options["nprocs"])
    step = options["step"]
    levels = options["levels"]
    minlevel = options["minlevel"]
    maxlevel = options["maxlevel"]
    cut = options["cut"]

    register_null = flags["n"]
    t_flag = flags["t"]
    

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)
                                               
    # Setup the flags
    flags = ""
    if t_flag is True:
        flags += "t"
    
    # Configure the r.contour module
    contour_module = pymod.Module("r.contour", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, flags=flags,
                                   overwrite=overwrite,
                                   quiet=True)

    if step:
        contour_module.inputs.step = float(step)
    if minlevel:
        contour_module.inputs.minlevel = float(minlevel)
    if maxlevel:
        contour_module.inputs.maxlevel = float(maxlevel)
    if levels:
        contour_module.inputs.levels = levels.split(",")
    if cut:
        contour_module.inputs.cut = int(cut)

    # The module queue for parallel execution, except when attribute tables
    # should be created; in that case force single process use
    if t_flag is False:
        if nprocs > 1:
            nprocs = 1
            gscript.warning(_("The number of parallel r.contour processes was "\
                              "reduced to 1 because of the attribute table "\
                              "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.contour on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(contour_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title,
                                descr, stype, dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count%10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        try:
            if map.load() is not True:
                continue
        except FatalError:
            continue
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector', name=names, 
                            quiet=True)

    dbif.close()
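
# The empty-map cleanup above accumulates the comma-separated name list by
# hand; a sketch of an equivalent, shorter form using str.join, assuming the
# same map objects (the helper name is ours):
import grass.script as gscript

def remove_empty_vector_maps(empty_maps):
    """Remove all empty vector maps in a single g.remove call."""
    names = ",".join(map_.get_name() for map_ in empty_maps)
    if names:
        gscript.run_command("g.remove", flags='f', type='vector',
                            name=names, quiet=True)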
Exemplo n.º 32
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': ['1 x 1 degree', '15 x 15 minute'],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011':
                (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness', 'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)
            },
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extension = nav_string['extension']
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(
            _("The default resampling method for product {product} is {res}").
            format(product=gui_product, res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    min_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['w'], gregion['s']),
                                      proj_out=product_proj4,
                                      separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['e'], gregion['n']),
                                      proj_out=product_proj4,
                                      separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = urllib.quote_plus(gui_prod_str)
    prod_format = urllib.quote_plus(product_format)
    prod_extent = urllib.quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try:
        TNM_API_GET = urllib2.urlopen(TNM_API_URL, timeout=12)
    except urllib2.URLError:
        gscript.fatal(
            _("USGS TNM API query has timed out. Check network configuration. Please try again."
              ))
    except:
        gscript.fatal(
            _("USGS TNM API query failed. Check network configuration. Please try again."
              ))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)

    except:
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir,
                                              ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size -
                       TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(
            _("TNM API ERROR or Zero tiles available for given input parameters."
              ))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    if exist_tile_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    if cleanup_list:
        cleanup_msg = _(
            "\n{0} existing incomplete file(s) detected and removed. Run module again."
        ).format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # formats JSON size from bytes into needed units for combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if 6 < len_total_size < 10:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        elif len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
        else:
            # sizes under ~1 MB would otherwise leave total_size_str unset
            total_size_str = str(total_size) + " B"
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(
                        gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(
            _("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urllib2.urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except urllib2.URLError:
            gscript.fatal(
                _("USGS download request has timed out. Network or formatting error."
                  ))
        except StandardError:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        for z in local_zip_path_list:
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.endswith(product_extension):
                            extracted_tile = os.path.join(work_dir, str(f))
                            if os.path.exists(extracted_tile):
                                os.remove(extracted_tile)
                            read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    cleanup_list.append(extracted_tile)
            except:
                cleanup_list.append(extracted_tile)
                gscript.fatal(
                    _("Unable to locate or extract IMG file from ZIP archive.")
                )

    # operations for extracted or complete files available locally
    for t in local_tile_path_list:
        # create variables for use in GRASS GIS import process
        LT_file_name = os.path.basename(t)
        LT_layer_name = os.path.splitext(LT_file_name)[0]
        patch_names.append(LT_layer_name)
        in_info = ("Importing and reprojecting {0}...").format(LT_file_name)
        gscript.info(in_info)
        # import to GRASS GIS
        try:
            gscript.run_command('r.import',
                                input=t,
                                output=LT_layer_name,
                                resolution='value',
                                resolution_value=product_resolution,
                                extent="region",
                                resample=product_interpolation)
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' or not gui_k_flag:
                cleanup_list.append(t)
        except CalledModuleError:
            in_error = ("Unable to import '{0}'").format(LT_file_name)
            gscript.fatal(in_error)

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if completed_tiles_count > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region',
                                        res=product_resolution,
                                        flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [
                            name + '.' + i for name in patch_names
                        ]
                        gscript.run_command('r.patch',
                                            input=patch_names_i,
                                            output=gui_output_layer + '.' + i)
                else:
                    gscript.run_command('r.patch',
                                        input=patch_names,
                                        output=gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added"
                            ).format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if 'k' flag
                if not gui_k_flag:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [
                                name + '.' + i for name in patch_names
                            ]
                            gscript.run_command('g.remove',
                                                type='raster',
                                                name=patch_names_i,
                                                flags='f')
                    else:
                        gscript.run_command('g.remove',
                                            type='raster',
                                            name=patch_names,
                                            flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
        elif completed_tiles_count == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename',
                                        raster=(patch_names[0] + '.' + i,
                                                gui_output_layer + '.' + i))
            else:
                gscript.run_command('g.rename',
                                    raster=(patch_names[0], gui_output_layer))
        temp_down_count = "\n{0} of {1} tile(s) successfully imported and patched.".format(
            completed_tiles_count, tiles_needed_count)
        gscript.info(temp_down_count)
    else:
        gscript.fatal("Error downloading files. Please retry.")

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = (
            "<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors',
                            map=gui_output_layer,
                            color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite',
                            red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2',
                            blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.del_temp_region()
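
# A minimal sketch of the chunked-download loop used above, factored into a
# helper; urllib2 is assumed, as in the example itself (Python 2), and the
# url/local_path arguments are placeholders supplied by the caller:
import urllib2

def download_file(url, local_path, chunk_size=16 * 1024):
    """Stream a remote file to disk in chunks instead of loading it whole."""
    response = urllib2.urlopen(url, timeout=12)
    with open(local_path, "wb") as local_file:
        while True:
            chunk = response.read(chunk_size)
            if not chunk:
                break
            local_file.write(chunk)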
Exemplo n.º 33
def main():
    import matplotlib  # required by Windows
    matplotlib.use('wxAGG')  # required by Windows
    import matplotlib.pyplot as plt

    if flags['w']:
        resampling_flags = 'w'
    else:
        resampling_flags = ''

    input = options['input']
    output = None
    if options['csv_output']:
        output = options['csv_output']
    plot_output = None
    if options['plot_output']:
        plot_output = options['plot_output']
    min_cells = False
    if options['min_cells']:
        min_cells = int(options['min_cells'])
    target_res = None
    if options['max_size']:
        target_res = float(options['max_size'])
    step = float(options['step'])

    global temp_resamp_map, temp_variance_map
    temp_resamp_map = str()  # define when running the 'base case'
    temp_variance_map = "temp_variance_map_%d" % os.getpid()
    resolutions = []
    variances = []

    region = gscript.parse_command('g.region', flags='g')
    cells = int(region['cells'])
    res = (float(region['nsres']) + float(region['ewres'])) / 2
    north = float(region['n'])
    south = float(region['s'])
    west = float(region['w'])
    east = float(region['e'])

    if res % 1 == 0 and step % 1 == 0:
        template_string = "%d,%f\n"
    else:
        template_string = "%f,%f\n"

    if min_cells:
        target_res_cells = int(sqrt(((east - west) * (north - south)) / min_cells))
        if target_res > target_res_cells:
            target_res = target_res_cells
            gscript.message(_("Max resolution leads to less cells than defined by 'min_cells' (%d)." % min_cells))
            gscript.message(_("Max resolution reduced to %d" % target_res))

    nb_iterations = (target_res - res) / step
    if nb_iterations < 3:
        message = _("Less than 3 iterations. Cannot determine maxima.\n")
        message += _("Please increase max_size or reduce min_cells.")
        gscript.fatal(message)

    gscript.use_temp_region()

    gscript.message(_("Calculating variance at different resolutions"))
    counter = 1
    while res <= target_res:
        gscript.percent(res, target_res, step)

        # base case
        if not temp_resamp_map:
            temp_resamp_map = "temp_resamp_map_%d" % os.getpid()
            # print('Base case', counter)
            gscript.run_command('r.resamp.stats',
                            input=input,
                            output=temp_resamp_map,
                            method='average',
                            flags=resampling_flags,
                            quiet=True,
                            overwrite=True)
        else:
            counter += 1
            # print('Existing resampled map', counter)
            gscript.run_command('r.resamp.stats',
                            input=temp_resamp_map,
                            output=temp_resamp_map,
                            method='average',
                            flags=resampling_flags,
                            quiet=True,
                            overwrite=True)

        gscript.run_command('r.neighbors',
                            input=temp_resamp_map,
                            method='variance',
                            output=temp_variance_map,
                            quiet=True,
                            overwrite=True)
        varianceinfo = gscript.parse_command('r.univar',
                                             map_=temp_variance_map,
                                             flags='g',
                                             quiet=True)
        resolutions.append(res)
        variances.append(float(varianceinfo['mean']))
        res += step
        region = gscript.parse_command('g.region',
                            res=res,
                            n=north,
                            s=south,
                            w=west,
                            e=east,
                            flags='ag')
        gscript.verbose('resolution (step {}): {}'.format(counter, res))
        cells = int(region['cells'])

    indices, differences = FindMaxima(variances)
    max_resolutions = [resolutions[x] for x in indices]
    gscript.message(_("resolution,min_diff"))
    for i in range(len(max_resolutions)):
        print("%g,%g" % (max_resolutions[i], differences[i]))

    if output:
        header = "resolution,variance\n"
        of = open(output, 'w')
        of.write(header)
        for i in range(len(resolutions)):
            output_string = template_string % (resolutions[i], variances[i])
            of.write(output_string)
        of.close()

    if plot_output:
        plt.plot(resolutions, variances)
        plt.xlabel("Resolution")
        plt.ylabel("Variance")
        plt.grid(True)
        if plot_output == '-':
            plt.show()
        else:
            plt.savefig(plot_output)
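
# FindMaxima() is not defined in this snippet. A plausible minimal version,
# matching its call site (indices of local maxima in the variance curve plus
# the smaller difference to the two neighbouring values), might look like:
def FindMaxima(values):
    indices = []
    differences = []
    for i in range(1, len(values) - 1):
        if values[i] > values[i - 1] and values[i] > values[i + 1]:
            indices.append(i)
            differences.append(min(values[i] - values[i - 1],
                                   values[i] - values[i + 1]))
    return indices, differences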
Exemplo n.º 34
def main():
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    use_raster_region = flags["r"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbors module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    gregion_module = pymod.Module(
        "g.region",
        raster="dummy",
        run_=False,
        finish_=False,
    )

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(
                map.temporal_extent.get_start_time(), sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffix(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())

        if use_raster_region is True:
            reg = copy.deepcopy(gregion_module)
            reg(raster=map.get_id())
            print(reg.get_bash())
            print(mod.get_bash())
            mm = pymod.MultiModule([reg, mod],
                                   sync=False,
                                   set_temp_region=True)
            process_queue.put(mm)
        else:
            print(mod.get_bash())
            process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()
    proc_list = process_queue.get_finished_modules()

    # Check return status of all finished modules
    error = 0
    for proc in proc_list:
        if proc.popen.returncode != 0:
            grass.error(
                _("Error running module: %s\n    stderr: %s") %
                (proc.get_bash(), proc.outputs.stderr))
            error += 1

    if error > 0:
        grass.fatal(_("Error running modules."))

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
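
# A minimal sketch of the MultiModule pattern used above: g.region and
# r.neighbors run as one unit inside a private temporary region, so parallel
# jobs cannot clobber each other's region settings. The map names here are
# placeholders:
import copy
import grass.pygrass.modules as pymod

queue = pymod.ParallelModuleQueue(nprocs=2)
region = pymod.Module("g.region", raster="dummy", run_=False, finish_=False)
neighbors = pymod.Module("r.neighbors", input="dummy", output="dummy",
                         size=3, method="average", run_=False, finish_=False,
                         overwrite=True, quiet=True)
for name in ("map_a", "map_b"):
    reg = copy.deepcopy(region)
    reg(raster=name)
    mod = copy.deepcopy(neighbors)
    mod(input=name, output=name + "_smooth")
    queue.put(pymod.MultiModule([reg, mod], sync=False, set_temp_region=True))
queue.wait()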
Exemplo n.º 35
 def _onCmdProgress(self, event):
     """Update progress message info"""
     grass.percent(event.value, 100, 1)
     event.Skip()
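
# A minimal sketch of the percent-reporting pattern wrapped by the handler
# above: grass.percent(i, n, s) prints progress in s-percent increments, and
# a final call with equal values finishes the progress line:
import grass.script as grass

n = 250
for i in range(1, n + 1):
    grass.percent(i, n, 1)
grass.percent(1, 1, 1)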
Exemplo n.º 36
def main():
    # Get the options
    input = options["input"]
    output = options["output"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    lower = options["lower"]
    upper = options["upper"]
    offset = options["offset"]
    limits = options["limits"]
    shift = options["shift"]
    scale = options["scale"]
    method = options["method"]
    granularity = options["granularity"]
    register_null = flags["n"]
    reverse = flags["r"]

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)

    if input_strds.is_in_db() == False:
        dbif.close()
        grass.fatal(_("Space time raster dataset <%s> not found") % (id))

    input_strds.select(dbif)

    if output.find("@") >= 0:
        out_id = output
    else:
        out_id = output + "@" + mapset

    # The output space time raster dataset
    output_strds = tgis.SpaceTimeRasterDataset(out_id)
    if output_strds.is_in_db(dbif):
        if not grass.overwrite():
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> is already in the "
                          "database, use overwrite flag to overwrite") % out_id)

    if tgis.check_granularity_string(granularity,
                                     input_strds.get_temporal_type()) == False:
            dbif.close()
            grass.fatal(_("Invalid granularity"))

    if tgis.check_granularity_string(cycle,
                                     input_strds.get_temporal_type()) == False:
            dbif.close()
            grass.fatal(_("Invalid cycle"))

    if offset:
        if tgis.check_granularity_string(offset,
                                         input_strds.get_temporal_type()) == False:
                dbif.close()
                grass.fatal(_("Invalid offset"))

    # The lower threshold space time raster dataset
    if lower:
        if not limits:
            dbif.close()
            grass.fatal(_("You need to set the limits option to compute the"
                          " occurrence space time raster dataset"))

        if lower.find("@") >= 0:
            lower_id = lower
        else:
            lower_id = lower + "@" + mapset

        lower_strds = tgis.SpaceTimeRasterDataset(lower_id)
        if lower_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> not found") % (lower_strds.get_id()))

        if lower_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Temporal type of input strds and lower strds must be equal"))

        lower_strds.select(dbif)

    # The upper threshold space time raster dataset
    if upper:
        if not lower:
            dbif.close()
            grass.fatal(_("The upper option works only in conjunction with the lower option"))

        if upper.find("@") >= 0:
            upper_id = upper
        else:
            upper_id = upper + "@" + mapset

        upper_strds = tgis.SpaceTimeRasterDataset(upper_id)
        if upper_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> not found") % (upper_strds.get_id()))

        if upper_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Temporal type of input strds and upper strds must be equal"))

        upper_strds.select(dbif)

    input_strds_start, input_strds_end = input_strds.get_temporal_extent_as_tuple()

    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
        start = tgis.adjust_datetime_to_granularity(start, granularity)
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle

    limit_relations = ["EQUALS", "DURING", "OVERLAPS", "OVERLAPPING", "CONTAINS"]

    count = 1
    output_maps = []


    while input_strds_end > start and stop > start:

        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'"%(str(start),
                                                                str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        grass.message(_("Processing cycle %s - %s"%(str(start), str(end))))

        if len(input_maps) == 0:
            continue

        # Lets create a dummy list of maps with granularity conform intervals
        gran_list = []
        gran_list_low = []
        gran_list_up = []
        gran_start = start
        while gran_start < end:
            map = input_strds.get_new_map_instance("%i@%i"%(count, count))
            if input_strds.is_time_absolute():
                gran_end = tgis.increment_datetime_by_string(gran_start,
                                                             granularity)
                map.set_absolute_time(gran_start, gran_end)
                gran_start = tgis.increment_datetime_by_string(gran_start,
                                                               granularity)
            else:
                gran_end = gran_start + granularity
                map.set_relative_time(gran_start, gran_end,
                                      input_strds.get_relative_time_unit())
                gran_start = gran_start + granularity
            gran_list.append(copy(map))
            gran_list_low.append(copy(map))
            gran_list_up.append(copy(map))
        # Lists to compute the topology with upper and lower datasets

        # Create the topology between the granularity conform list and all maps
        # of the current cycle
        gran_topo = tgis.SpatioTemporalTopologyBuilder()
        gran_topo.build(gran_list, input_maps)

        if lower:
            lower_maps = lower_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_lower_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_lower_topo.build(gran_list_low, lower_maps)

        if upper:
            upper_maps = upper_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_upper_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_upper_topo.build(gran_list_up, upper_maps)

        old_map_name = None

        # Aggregate
        num_maps = len(gran_list)

        for i in xrange(num_maps):
            if reverse:
                map = gran_list[num_maps - i - 1]
            else:
                map = gran_list[i]
            # Select input maps based on temporal topology relations
            input_maps = []
            if map.get_equal():
                input_maps += map.get_equal()
            elif map.get_contains():
                input_maps += map.get_contains()
            elif map.get_overlaps():
                input_maps += map.get_overlaps()
            elif map.get_overlapped():
                input_maps += map.get_overlapped()
            elif map.get_during():
                input_maps += map.get_during()

            # Check input maps
            if len(input_maps) == 0:
                continue

            # New output map
            output_map_name = "%s_%i" % (base, count)
            output_map_id = map.build_id(output_map_name, mapset)
            output_map = input_strds.get_new_map_instance(output_map_id)

            # Check if new map is in the temporal database
            if output_map.is_in_db(dbif):
                if grass.overwrite():
                    # Remove the existing temporal database entry
                    output_map.delete(dbif)
                    output_map = input_strds.get_new_map_instance(output_map_id)
                else:
                    grass.fatal(_("Map <%s> is already registered in the temporal"
                                 " database, use overwrite flag to overwrite.") %
                                (output_map.get_map_id()))

            map_start, map_end = map.get_temporal_extent_as_tuple()

            if map.is_time_absolute():
                output_map.set_absolute_time(map_start, map_end)
            else:
                output_map.set_relative_time(map_start, map_end,
                                             map.get_relative_time_unit())

            limits_vals = limits.split(",")
            limits_lower = float(limits_vals[0])
            limits_upper = float(limits_vals[1])

            lower_map_name = None
            if lower:
                relations = gran_list_low[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        lower_map_name = str(relations[relation][0].get_id())
                        break

            upper_map_name = None
            if upper:
                relations = gran_list_up[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        upper_map_name = str(relations[relation][0].get_id())
                        break

            input_map_names = []
            for input_map in input_maps:
                input_map_names.append(input_map.get_id())

            # Set up the module
            accmod = Module("r.series.accumulate", input=input_map_names,
                            output=output_map_name, run_=False)

            if old_map_name:
                accmod.inputs["basemap"].value = old_map_name
            if lower_map_name:
                accmod.inputs["lower"].value = lower_map_name
            if upper_map_name:
                accmod.inputs["upper"].value = upper_map_name

            accmod.inputs["limits"].value = (limits_lower, limits_upper)

            if shift:
                accmod.inputs["shift"].value = float(shift)

            if scale:
                accmod.inputs["scale"].value = float(scale)

            if method:
                accmod.inputs["method"].value = method

            print(accmod)
            accmod.run()

            if accmod.popen.returncode != 0:
                dbif.close()
                grass.fatal(_("Error running r.series.accumulate"))

            output_maps.append(output_map)
            old_map_name = output_map_name
            count += 1

        # Increment the cycle
        start = end
        if input_strds.is_time_absolute():
            start = end
            if offset:
                start = tgis.increment_datetime_by_string(end, offset)

            end = tgis.increment_datetime_by_string(start, cycle)
        else:
            if offset:
                start = end + offset
            end = start + cycle

    # Insert the maps into the output space time dataset
    if output_strds.is_in_db(dbif):
        if grass.overwrite():
            output_strds.delete(dbif)
            output_strds = input_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = input_strds.get_initial_values()
    output_strds.set_initial_values(temporal_type, semantic_type, title,
                                    description)
    output_strds.insert(dbif)

    empty_maps = []
    # Register the maps in the database
    count = 0
    for output_map in output_maps:
        count += 1
        if count%10 == 0:
            grass.percent(count, len(output_maps), 1)
        # Read the raster map data
        output_map.load()
        # In case of an empty map continue, do not register empty maps

        if not register_null:
            if output_map.metadata.get_min() is None and \
                output_map.metadata.get_max() is None:
                empty_maps.append(output_map)
                continue

        # Insert map in temporal database
        output_map.insert(dbif)
        output_strds.register_map(output_map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    output_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    dbif.close()

    # Remove empty maps
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove", flags='f', type="rast",  pattern=map.get_name(), quiet=True)
Exemplo n.º 37
def main():
    # lazy imports
    import grass.temporal as tgis
    from grass.pygrass.modules import Module

    # Get the options
    input = options["input"]
    output = options["output"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    lower = options["lower"]
    upper = options["upper"]
    offset = options["offset"]
    limits = options["limits"]
    shift = options["shift"]
    scale = options["scale"]
    method = options["method"]
    granularity = options["granularity"]
    register_null = flags["n"]
    reverse = flags["r"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)

    if input_strds.is_in_db() == False:
        dbif.close()
        grass.fatal(_("Space time raster dataset <%s> not found") % (id))

    input_strds.select(dbif)

    if output.find("@") >= 0:
        out_id = output
    else:
        out_id = output + "@" + mapset

    # The output space time raster dataset
    output_strds = tgis.SpaceTimeRasterDataset(out_id)
    if output_strds.is_in_db(dbif):
        if not grass.overwrite():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is already in the "
                  "database, use overwrite flag to overwrite") % out_id)

    if tgis.check_granularity_string(granularity,
                                     input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid granularity"))

    if tgis.check_granularity_string(cycle,
                                     input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid cycle"))

    if offset:
        if tgis.check_granularity_string(
                offset, input_strds.get_temporal_type()) == False:
            dbif.close()
            grass.fatal(_("Invalid offset"))

    # The lower threshold space time raster dataset
    if lower:
        # The original check tested the builtin 'range', which is always
        # truthy; the option actually read above is 'limits'
        if not limits:
            dbif.close()
            grass.fatal(
                _("You need to set the limits to compute the occurrence"
                  " space time raster dataset"))

        if lower.find("@") >= 0:
            lower_id = lower
        else:
            lower_id = lower + "@" + mapset

        lower_strds = tgis.SpaceTimeRasterDataset(lower_id)
        if not lower_strds.is_in_db():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (lower_strds.get_id()))

        if lower_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and lower strds must be equal")
            )

        lower_strds.select(dbif)

    # The upper threshold space time raster dataset
    if upper:
        if not lower:
            dbif.close()
            grass.fatal(
                _("The upper option works only in conjunction with the lower option"
                  ))

        if upper.find("@") >= 0:
            upper = upper
        else:
            upper_id = upper + "@" + mapset

        upper_strds = tgis.SpaceTimeRasterDataset(upper_id)
        if not upper_strds.is_in_db():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (upper_strds.get_id()))

        if upper_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and upper strds must be equal")
            )

        upper_strds.select(dbif)

    input_strds_start, input_strds_end = \
        input_strds.get_temporal_extent_as_tuple()

    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
        start = tgis.adjust_datetime_to_granularity(start, granularity)
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle
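    # e.g. with cycle "1 years" and an absolute start of 2000-01-01, the
    # first cycle ends at 2001-01-01 (illustrative values)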

    limit_relations = [
        "EQUALS", "DURING", "OVERLAPS", "OVERLAPPING", "CONTAINS"
    ]
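    # These relations, checked in the given order, select the lower/upper
    # threshold map that temporally coincides with each granule of a cycle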

    count = 1
    output_maps = []

    while input_strds_end > start and stop > start:

        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'" % (str(start),
                                                                  str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        grass.message(_("Processing cycle %s - %s" % (str(start), str(end))))

        if len(input_maps) == 0:
            # No maps in this cycle: advance to the next cycle before
            # continuing, otherwise start/end would never change and the
            # loop would never terminate
            start = end
            if input_strds.is_time_absolute():
                if offset:
                    start = tgis.increment_datetime_by_string(end, offset)
                end = tgis.increment_datetime_by_string(start, cycle)
            else:
                if offset:
                    start = end + offset
                end = start + cycle
            continue

        # Let's create a dummy list of maps with granularity-conform intervals
        gran_list = []
        gran_list_low = []
        gran_list_up = []
        gran_start = start
        while gran_start < end:
            map = input_strds.get_new_map_instance("%i@%i" % (count, count))
            if input_strds.is_time_absolute():
                gran_end = tgis.increment_datetime_by_string(
                    gran_start, granularity)
                map.set_absolute_time(gran_start, gran_end)
                gran_start = tgis.increment_datetime_by_string(
                    gran_start, granularity)
            else:
                gran_end = gran_start + granularity
                map.set_relative_time(gran_start, gran_end,
                                      input_strds.get_relative_time_unit())
                gran_start = gran_start + granularity
            gran_list.append(copy(map))
            # Separate copies for computing the topology with the lower
            # and upper threshold datasets
            gran_list_low.append(copy(map))
            gran_list_up.append(copy(map))

        # Create the topology between the granularity conform list and all maps
        # of the current cycle
        gran_topo = tgis.SpatioTemporalTopologyBuilder()
        gran_topo.build(gran_list, input_maps)

        if lower:
            lower_maps = lower_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_lower_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_lower_topo.build(gran_list_low, lower_maps)

        if upper:
            upper_maps = upper_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_upper_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_upper_topo.build(gran_list_up, upper_maps)

        old_map_name = None

        # Aggregate
        num_maps = len(gran_list)

        for i in range(num_maps):
            if reverse:
                map = gran_list[num_maps - i - 1]
            else:
                map = gran_list[i]
            # Select input maps based on temporal topology relations
            input_maps = []
            if map.get_equal():
                input_maps += map.get_equal()
            elif map.get_contains():
                input_maps += map.get_contains()
            elif map.get_overlaps():
                input_maps += map.get_overlaps()
            elif map.get_overlapped():
                input_maps += map.get_overlapped()
            elif map.get_during():
                input_maps += map.get_during()

            # Check input maps
            if len(input_maps) == 0:
                continue

            # New output map
            if (input_strds.get_temporal_type() == 'absolute'
                    and time_suffix == 'gran'):
                suffix = tgis.create_suffix_from_datetime(
                    map.temporal_extent.get_start_time(),
                    input_strds.get_granularity())
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            elif (input_strds.get_temporal_type() == 'absolute'
                    and time_suffix == 'time'):
                suffix = tgis.create_time_suffix(map)
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            else:
                output_map_name = tgis.create_numeric_suffix(
                    base, count, time_suffix)

            output_map_id = map.build_id(output_map_name, mapset)
            output_map = input_strds.get_new_map_instance(output_map_id)

            # Check if new map is in the temporal database
            if output_map.is_in_db(dbif):
                if grass.overwrite():
                    # Remove the existing temporal database entry
                    output_map.delete(dbif)
                    output_map = input_strds.get_new_map_instance(
                        output_map_id)
                else:
                    grass.fatal(
                        _("Map <%s> is already registered in the temporal"
                          " database, use overwrite flag to overwrite.") %
                        (output_map.get_map_id()))

            map_start, map_end = map.get_temporal_extent_as_tuple()

            if map.is_time_absolute():
                output_map.set_absolute_time(map_start, map_end)
            else:
                output_map.set_relative_time(map_start, map_end,
                                             map.get_relative_time_unit())

            limits_vals = limits.split(",")
            limits_lower = float(limits_vals[0])
            limits_upper = float(limits_vals[1])

            lower_map_name = None
            if lower:
                relations = gran_list_low[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        lower_map_name = str(relations[relation][0].get_id())
                        break

            upper_map_name = None
            if upper:
                relations = gran_list_up[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        upper_map_name = str(relations[relation][0].get_id())
                        break

            input_map_names = []
            for input_map in input_maps:
                input_map_names.append(input_map.get_id())

            # Set up the module
            accmod = Module("r.series.accumulate",
                            input=input_map_names,
                            output=output_map_name,
                            run_=False)
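            # run_=False constructs the Module object without executing it,
            # so the optional inputs below can be set before calling run()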

            if old_map_name:
                accmod.inputs["basemap"].value = old_map_name
            if lower_map_name:
                accmod.inputs["lower"].value = lower_map_name
            if upper_map_name:
                accmod.inputs["upper"].value = upper_map_name

            accmod.inputs["limits"].value = (limits_lower, limits_upper)

            if shift:
                accmod.inputs["shift"].value = float(shift)

            if scale:
                accmod.inputs["scale"].value = float(scale)

            if method:
                accmod.inputs["method"].value = method

            grass.verbose(str(accmod))
            accmod.run()

            if accmod.popen.returncode != 0:
                dbif.close()
                grass.fatal(_("Error running r.series.accumulate"))

            output_maps.append(output_map)
            old_map_name = output_map_name
            count += 1

        # Increment the cycle
        start = end
        if input_strds.is_time_absolute():
            if offset:
                start = tgis.increment_datetime_by_string(end, offset)

            end = tgis.increment_datetime_by_string(start, cycle)
        else:
            if offset:
                start = end + offset
            end = start + cycle

    # Insert the maps into the output space time dataset
    if output_strds.is_in_db(dbif):
        if grass.overwrite():
            output_strds.delete(dbif)
            output_strds = input_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = \
        input_strds.get_initial_values()
    output_strds.set_initial_values(temporal_type, semantic_type, title,
                                    description)
    output_strds.insert(dbif)

    empty_maps = []
    # Register the maps in the database
    count = 0
    for output_map in output_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, len(output_maps), 1)
        # Read the raster map data
        output_map.load()
        # In case of an empty map continue, do not register empty maps
        if not register_null:
            if output_map.metadata.get_min() is None and \
                    output_map.metadata.get_max() is None:
                empty_maps.append(output_map)
                continue

        # Insert map in temporal database
        output_map.insert(dbif)
        output_strds.register_map(output_map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    output_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    dbif.close()

    # Remove empty maps
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove",
                              flags='f',
                              type="raster",
                              name=map.get_name(),
                              quiet=True)
Exemplo n.º 38
    def _onCmdProgress(self, event):
        """Update progress message info"""
        grass.percent(event.value, 100, 1)
        event.Skip()
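        # grass.percent(value, total, step) prints progress only every
        # <step> percent; the command's 0-100 progress value is forwarded
        # unchanged to GRASS's standard progress output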
Exemplo n.º 39
def extendLine(map, map_out, maxlen=200, scale=0.5, debug=False, verbose=1):
    """Extend dangling lines so they meet nearby features.

    map      -- input map name
    map_out  -- output map with extensions
    maxlen   -- maximum length in map units that a line can be extended
                (default 200)
    scale    -- maximum length of an extension as a proportion of the
                original line, disabled if 0 (default 0.5)
    vlen     -- number of vertices to look back when calculating the line
                end direction (default 1); hard-coded below rather than
                exposed as a parameter
    """
    allowOverwrite = os.getenv('GRASS_OVERWRITE', '0') == '1'
    grass.info("map={}, map_out={}, maxlen={}, scale={}, debug={}".format(
        map, map_out, maxlen, scale, debug))
    vlen = 1  # not sure if this is worth putting in as parameter
    cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'parent', 'INTEGER'),
            (u'dend', 'TEXT'), (u'orgx', 'DOUBLE PRECISION'),
            (u'orgy', 'DOUBLE PRECISION'), (u'search_len', 'DOUBLE PRECISION'),
            (u'search_az', 'DOUBLE PRECISION'), (u'best_xid', 'INTEGER'),
            (u'near_x', 'DOUBLE PRECISION'), (u'near_y', 'DOUBLE PRECISION'),
            (u'other_cat', 'INTEGER'), (u'xtype', 'TEXT'),
            (u'x_len', 'DOUBLE PRECISION')]
    extend = VectorTopo('extend')
    if extend.exist():
        extend.remove()
    extend.open('w', tab_name='extend', tab_cols=cols)
    #
    # Go through the input map, looking at each line and its two nodes to find nodes
    # with only a single line starting/ending there - i.e. a dangle.
    # For each found, generate an extension line in the new map "extend"
    #
    inMap = VectorTopo(map)
    inMap.open('r')
    dangleCnt = 0
    tickLen = len(inMap)
    grass.info("Searching {} features for dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")
    for ln in inMap:
        ticker = (ticker + 1)
        grass.percent(ticker, tickLen, 5)
        if ln.gtype == 2:  # Only process lines
            for nd in ln.nodes():
                if nd.nlines == 1:  # We have a dangle
                    dangleCnt = dangleCnt + 1
                    vtx = min(len(ln) - 1, vlen)
                    if len([1 for _ in nd.lines(only_out=True)
                            ]) == 1:  # Dangle starting at node
                        dend = "head"
                        sx = ln[0].x
                        sy = ln[0].y
                        dx = sx - ln[vtx].x
                        dy = sy - ln[vtx].y
                    else:  # Dangle ending at node
                        dend = "tail"
                        sx = ln[-1].x
                        sy = ln[-1].y
                        dx = sx - ln[-(vtx + 1)].x
                        dy = sy - ln[-(vtx + 1)].y
                    endaz = math.atan2(dy, dx)
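                    # endaz: direction of the dangle's end in radians,
                    # atan2 convention (counter-clockwise from east)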
                    if scale > 0:
                        extLen = min(ln.length() * scale, maxlen)
                    else:
                        extLen = maxlen
                    ex = extLen * math.cos(endaz) + sx
                    ey = extLen * math.sin(endaz) + sy
                    extLine = geo.Line([(sx, sy), (ex, ey)])
                    quiet = extend.write(extLine,
                                         (ln.cat, dend, sx, sy, extLen, endaz,
                                          0, 0, 0, 0, 'null', extLen))

    grass.info(
        "{} dangle nodes found, committing table extend".format(dangleCnt))
    extend.table.conn.commit()
    extend.close(build=True, release=True)
    inMap.close()

    #
    # Create two tables where extensions intersect;
    # 1. intersect with original lines
    # 2. intersect with self - to extract intersects between extensions
    #
    # First the intersects with original lines
    grass.info(
        "Searching for intersects between potential extensions and original lines"
    )
    table_isectIn = Table('isectIn',
                          connection=sqlite3.connect(get_path(path)))
    if table_isectIn.exist():
        table_isectIn.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to=map,
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectIn")
    # Will have touched the dangle it comes from, so remove those touches
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn INNER JOIN extend ON from_cat=cat WHERE near_cat=parent)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectIn ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectIn SET ntype = 'orig' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Now second self intersect table
    #
    grass.info("Searching for intersects of potential extensions")
    table_isectX = Table('isectX', connection=sqlite3.connect(get_path(path)))
    if table_isectX.exist():
        table_isectX.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to="extend",
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectX")
    # Obviously all extensions will intersect with themself, so remove those "intersects"
    run_command("db.execute",
                sql="DELETE FROM isectX WHERE from_cat = near_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectX ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectX SET ntype = 'ext' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Combine the two tables and add a few more attributes
    #
    run_command("db.execute",
                sql="INSERT INTO isectIn SELECT * FROM isectX",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    cols_isectIn = Columns('isectIn',
                           connection=sqlite3.connect(get_path(path)))
    cols_isectIn.add(['from_x'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['from_y'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['ext_len'], ['DOUBLE PRECISION'])
    # Get starting coordinate at the end of the dangle
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_x = (SELECT extend.orgx FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_y = (SELECT extend.orgy FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    # For each intersect point, calculate the distance along the extension
    # line from the end of the dangle. It would be nicer to do this in the
    # database, but SQLite doesn't support sqrt or exponents.
    grass.info(
        "Calculating distances of intersects along potential extensions")
    cur = table_isectIn.execute(
        sql_code="SELECT rowid, from_x, from_y, nx, ny FROM isectIn")
    for row in cur.fetchall():
        rowid, fx, fy, nx, ny = row
        x_len = math.sqrt((fx - nx)**2 + (fy - ny)**2)
        sqlStr = "UPDATE isectIn SET ext_len={:.8f} WHERE rowid={:d}".format(
            x_len, rowid)
        table_isectIn.execute(sql_code=sqlStr)
    grass.verbose("Ready to commit isectIn changes")
    table_isectIn.conn.commit()
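    # Note: sqlite3 can register Python functions on a connection, so the
    # distance could be computed in SQL instead; a sketch (not used here):
    #   conn = table_isectIn.conn
    #   conn.create_function("dist", 4,
    #                        lambda fx, fy, nx, ny:
    #                            math.sqrt((fx - nx)**2 + (fy - ny)**2))
    #   conn.execute("UPDATE isectIn SET ext_len = dist(from_x, from_y, nx, ny)")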
    # Remove any intersects at zero distance from the end of their dangle.
    # This happens when another extension intersects exactly at that point.
    run_command("db.execute",
                sql="DELETE FROM isectIn WHERE ext_len = 0.0",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()

    # Go through the extensions and find the intersect closest to each origin.
    grass.info("Searching for closest intersect for each potential extension")

    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN bst INTEGER"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nrx DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nry DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN ocat TEXT"
    #    run_command("db.execute",
    #                sql = "INSERT OR REPLACE INTO extend_t1 (bst, nrx, nry, ocat) VALUES ((SELECT isectIn.rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=extend_t1.cat ORDER BY ext_len ASC LIMIT 1))",
    #               driver = "sqlite",
    #               database = "$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    grass.verbose("CREATE index")
    run_command("db.execute",
                sql="CREATE INDEX idx_from_cat ON isectIn (from_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE best_xid")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET best_xid = (SELECT isectIn.rowid FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE x_len")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET x_len = (SELECT ext_len FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_x")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_x = (SELECT nx FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_y")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_y = (SELECT ny FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE other_cat")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET other_cat = (SELECT near_cat FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE xtype")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET xtype = (SELECT ntype FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("DROP index")
    run_command("db.execute",
                sql="DROP INDEX idx_from_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("CREATE index on near_cat")
    run_command("db.execute",
                sql="CREATE INDEX idx_near_cat ON isectIn (near_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    quiet = table_isectIn.filters.select('rowid', 'ext_len', 'nx', 'ny',
                                         'near_cat', 'ntype')
    #    quiet=table_isectIn.filters.order_by(['ext_len ASC'])
    quiet = table_isectIn.filters.order_by('ext_len ASC')
    quiet = table_isectIn.filters.limit(1)
    table_extend = Table('extend', connection=sqlite3.connect(get_path(path)))

    # Code below was replaced by the commands above until the memory problem can be sorted out
    #    table_extend.filters.select('cat')
    #    cur=table_extend.execute()
    #    updateCnt = 0
    #    for row in cur.fetchall():
    #        cat, = row
    #        quiet=table_isectIn.filters.where('from_cat={:d}'.format(cat))

    ##SELECT rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=32734 ORDER BY ext_len ASC LIMIT 1

    #        x_sect=table_isectIn.execute().fetchone()
    #        if x_sect is not None:
    #            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
    #            sqlStr="UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
    #            table_extend.execute(sql_code=sqlStr)
    ## Try periodic commit to avoid a crash!
    #            updateCnt = (updateCnt + 1) % 10000
    #            if updateCnt == 0:
    #              table_extend.conn.commit()
    grass.verbose("Ready to commit extend changes")
    table_extend.conn.commit()
    #
    # There may be extensions that crossed, where the intersection was chosen
    # by one but not reciprocated by the other.
    # Need to remove those possibilities and allow the jilted extension to
    # re-search.
    #
    grass.verbose("Deleting intersects already resolved")
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn JOIN extend ON near_cat=cat WHERE ntype='ext' AND xtype!='null')",  #"AND from_cat!=other_cat" no second chance!
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    grass.verbose("Deleting complete")

    # To find the jilted - need a copy of extensions that have found an
    # intersection (won't overwrite so drop first)
    grass.verbose(
        "Re-searching for mis-matched intersects between potential extensions")
    table_imatch = Table('imatch', connection=sqlite3.connect(get_path(path)))
    if table_imatch.exist():
        table_imatch.drop(force=True)
    wvar = "xtype!='null'"
    run_command(
        "db.copy",
        overwrite=True,
        quiet=True,
        from_driver="sqlite",
        from_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        from_table="extend",
        to_driver="sqlite",
        to_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        to_table="imatch",
        where=wvar)
    # Memory problems?
    if gc.isenabled():
        grass.verbose("Garbage collection enabled - forcing gc cycle")
        gc.collect()
    else:
        grass.verbose("Garbage collection not enabled")
    # Ensure tables are committed
    table_extend.conn.commit()
    table_imatch.conn.commit()
    table_isectIn.conn.commit()
    # Identify the jilted
    sqlStr = "SELECT extend.cat FROM extend JOIN imatch ON extend.other_cat=imatch.cat WHERE extend.xtype='ext' and extend.cat!=imatch.other_cat"
    cur = table_extend.execute(sql_code=sqlStr)
    updateCnt = 0
    for row in cur.fetchall():
        cat, = row
        grass.verbose("Reworking extend.cat={}".format(cat))
        quiet = table_isectIn.filters.where('from_cat={:d}'.format(cat))
        #print("SQL: {}".format(table_isectIn.filters.get_sql()))
        x_sect = table_isectIn.execute().fetchone()  ## Problem here under modules
        if x_sect is None:
            sqlStr = "UPDATE extend SET best_xid=0, x_len=search_len, near_x=0, near_y=0, other_cat=0, xtype='null' WHERE cat={:d}".format(
                cat)
        else:
            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
            sqlStr = "UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(
                x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
        table_extend.execute(sql_code=sqlStr)
        ## Try periodic commit to avoid a crash!
        updateCnt = (updateCnt + 1) % 100
        if (updateCnt == 0):  # or (cat == 750483):
            grass.verbose("Committing table_extend (periodic)")
            table_extend.conn.commit()

    grass.verbose("Committing adjustments to table extend")
    table_extend.conn.commit()
    #
    # For debugging, create a map with the chosen intersect points
    #
    if debug:
        wvar = "xtype!='null' AND x_len!=0"
        #        print(wvar)
        run_command(
            "v.in.db",
            overwrite=True,
            quiet=True,
            table="extend",
            driver="sqlite",
            database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
            x="near_x",
            y="near_y",
            key="cat",
            where=wvar,
            output="chosen")
    #
    # Finally adjust the dangle lines in the input map - use a copy (map_out)
    # if requested
    #
    if map_out:
        run_command("g.copy",
                    overwrite=allowOverwrite,
                    quiet=True,
                    vector=map + "," + map_out)
    else:  # Otherwise just modify the original dataset (map)
        if allowOverwrite:
            grass.warning("Modifying vector map ({})".format(map))
            map_out = map
        else:
            grass.error(
                "Use the --o switch to modify the input vector map ({})".format(
                    map))
            return 1
    #
    # Get info for lines that need extending
    #
    table_extend.filters.select(
        'parent, dend, near_x, near_y, search_az, xtype')
    table_extend.filters.where("xtype!='null'")
    extLines = table_extend.execute().fetchall()
    cat_mods = [ext[0] for ext in extLines]
    tickLen = len(cat_mods)
    grass.info("Extending {} dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")

    # Open up the map_out copy (or the original) and work through looking for lines that need modifying
    inMap = VectorTopo(map_out)
    inMap.open('rw', tab_name=map_out)

    for ln_idx in range(len(inMap)):
        ln = inMap.read(ln_idx + 1)
        if ln.gtype == 2:  # Only process lines
            while ln.cat in cat_mods:  # Note: could be 'head' and 'tail'
                ticker = (ticker + 1)
                grass.percent(ticker, tickLen, 5)
                cat_idx = cat_mods.index(ln.cat)
                cat, dend, nx, ny, endaz, xtype = extLines.pop(cat_idx)
                dump = cat_mods.pop(cat_idx)
                if xtype == 'orig':  # Overshoot by 0.1 as breaking lines is unreliable
                    nx = nx + 0.1 * math.cos(endaz)
                    ny = ny + 0.1 * math.sin(endaz)
                newEnd = geo.Point(x=nx, y=ny, z=None)
                if dend == 'head':
                    ln.insert(0, newEnd)
                else:  # 'tail'
                    ln.append(newEnd)
                quiet = inMap.rewrite(ln_idx + 1, ln)
        else:
            quiet = inMap.delete(ln_idx + 1)


        ## Try periodic commit and garbage collection to avoid a crash!
        if (ln_idx % 1000) == 0:
            #           inMap.table.conn.commit()  - no such thing - Why??
            if gc.isenabled():
                quiet = gc.collect()

    inMap.close(build=True, release=True)
    grass.message("v.extendlines completing")
    #
    # Clean up temporary tables and maps
    #
    if not debug:
        table_isectIn.drop(force=True)
        table_isectX.drop(force=True)
        table_imatch.drop(force=True)
        extend.remove()
        chosen = VectorTopo('chosen')
        if chosen.exist():
            chosen.remove()
    return 0
Exemplo n.º 40
def main():

    # Get the options
    file = options["file"]
    input = options["input"]
    maps = options["maps"]
    type = options["type"]

    # Make sure the temporal database exists
    tgis.init()

    if maps and file:
        grass.fatal(_(
            "%s= and %s= are mutually exclusive") % ("maps", "file"))

    if not maps and not file:
        grass.fatal(_("%s= or %s= must be specified") % ("maps", "file"))

    mapset = grass.gisenv()["MAPSET"]

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # In case a space time dataset is specified
    if input:
        sp = tgis.open_old_stds(input, type, dbif)

    maplist = []

    dummy = tgis.RasterDataset(None)

    # Map names as comma separated string
    if maps is not None and maps != "":
        if maps.find(",") == -1:
            maplist = [maps, ]
        else:
            maplist = maps.split(",")

        # Build the maplist
        for count in range(len(maplist)):
            mapname = maplist[count]
            mapid = dummy.build_id(mapname, mapset)
            maplist[count] = mapid

    # Read the map list from file
    if file:
        fd = open(file, "r")

        line = True
        while True:
            line = fd.readline()
            if not line:
                break

            mapname = line.strip()
            mapid = dummy.build_id(mapname, mapset)
            maplist.append(mapid)

    num_maps = len(maplist)
    update_dict = {}
    count = 0

    statement = ""

    # Unregister already registered maps
    grass.message(_("Unregister maps"))
    for mapid in maplist:
        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        map = tgis.dataset_factory(type, mapid)

        # Unregister map if in database
        if map.is_in_db(dbif):
            # Unregister from a single dataset
            if input:
                # Collect SQL statements
                statement += sp.unregister_map(
                    map=map, dbif=dbif, execute=False)

            # Unregister from temporal database
            else:
                # We need to update all datasets after the removal of maps
                map.metadata.select(dbif)
                datasets = map.get_registered_stds(dbif)
                # Store all unique dataset ids in a dictionary
                if datasets:
                    for dataset in datasets:
                        update_dict[dataset] = dataset
                # Collect SQL statements
                statement += map.delete(dbif=dbif, update=False, execute=False)
        else:
            grass.warning(_("Unable to find %s map <%s> in temporal database" %
                            (map.get_type(), map.get_id())))

        count += 1

    # Execute the collected SQL statements
    if statement:
        dbif.execute_transaction(statement)

    grass.percent(num_maps, num_maps, 1)

    # Update space time datasets
    if input:
        grass.message(_("Unregister maps from space time dataset <%s>"%(input)))
    else:
        grass.message(_("Unregister maps from the temporal database"))

    if input:
        sp.update_from_registered_maps(dbif)
        sp.update_command_string(dbif=dbif)
    elif len(update_dict) > 0:
        count = 0
        for key in update_dict.keys():
            id = update_dict[key]
            sp = tgis.open_old_stds(id, type, dbif)
            sp.update_from_registered_maps(dbif)
            grass.percent(count, len(update_dict), 1)
            count += 1

    dbif.close()
Exemplo n.º 41
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': [
                '1 x 1 degree',
                '15 x 15 minute'
            ],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006': (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011': (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness',
                'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)},
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "lidar": {
            'product': 'Lidar Point Cloud (LPC)',
            'dataset': {
                'Lidar Point Cloud (LPC)': (1. / 3600 / 9, 3, 10)},
            'subset': {},
            'extent': [''],
            'format': 'LAS,LAZ',
            'extension': 'las,laz',
            'zip': True,
            'srs': '',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }
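    # Each 'dataset' tuple holds the product's native resolution expressed
    # in (degrees, meters, feet); the value matching the location's units
    # is selected further below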

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extensions = tuple(nav_string['extension'].split(','))
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    has_pdal = gscript.find_program(pgm='v.in.pdal')
    if gui_product == 'lidar':
        gui_dataset = 'Lidar Point Cloud (LPC)'
        product_tag = nav_string['product']
        if not has_pdal:
            gscript.warning(_("Module v.in.pdal is missing,"
                              " any downloaded data will not be processed."))
    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']
    memory = options['memory']
    nprocs = options['nprocs']

    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not os.path.isdir(work_dir):
        gscript.fatal(_("Directory <{}> does not exist."
                        " Please create it.").format(work_dir))

    # Pick the product resolution matching the current location's units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == 'lidar' and options['resolution']:
        product_resolution = float(options['resolution'])

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(_("The default resampling method for product {product} is {res}").format(product=gui_product,
                        res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    min_coords = gscript.read_command('m.proj', coordinates=(gregion['w'], gregion['s']),
                                      proj_out=wgs84, separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj', coordinates=(gregion['e'], gregion['n']),
                                      proj_out=wgs84, separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_message = _("Possibly, the query has timed out. Check network configuration and try again.")
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(_(
            "HTTP(S) error from USGS TNM API:"
            " {code}: {reason} ({instructions})").format(
                reason=error.reason, code=error.code,
                instructions=try_again_message))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(_(
            "Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_message))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == 'lidar' and options['title_filter']:
            return_JSON['items'] = [item for item in return_JSON['items'] if options['title_filter'] in item['title']]
            return_JSON['total'] = len(return_JSON['items'])

    except (ValueError, KeyError):
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
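    # a local file whose size differs from the API-reported size by more
    # than size_diff_tolerance bytes is treated as incomplete and re-downloaded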
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir, ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size - TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(_("TNM API ERROR or Zero tiles available for given input parameters."))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _("\n{0} of {1} files/archive(s) exist locally and will be used by module.").format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: simply continue with whatever is needed to be done in this case
    if cleanup_list:
        cleanup_msg = _("\n{0} existing incomplete file(s) detected and removed. Run module again.").format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # format the JSON-reported size from bytes into the units needed for the combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if len_total_size >= 10:
            total_size_float = total_size * 1e-9
            total_size_str = str("{0:.2f}".format(total_size_float) + " GB")
        elif len_total_size > 6:
            total_size_float = total_size * 1e-6
            total_size_str = str("{0:.2f}".format(total_size_float) + " MB")
        else:
            # the original code left total_size_str unset for sums under 1 MB
            total_size_str = "{0} bytes".format(total_size)
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(_("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
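            # stream in 16 KiB chunks so memory use stays constant
            # regardless of file size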
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(_("USGS download request has timed out. Network or formatting error."))
        except Exception:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    # our pre-stats for extraction are broken, so stats are collected
    # during extraction instead
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count > 0:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, we extract again
                                    if os.path.getmtime(extracted_tile) < os.path.getmtime(z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if extracted_tile and os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                if extracted_tile:
                    cleanup_list.append(extracted_tile)
                gscript.fatal(_(
                    "Unable to locate or extract IMG file '{filename}'"
                    " from ZIP archive '{zipname}': {error}").format(
                        filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(_("Extracted {extracted} new tiles and"
                          " used {used} existing tiles").format(
            used=used_existing_extracted_tiles_num,
            extracted=extracted_tiles_num
        ))
        if old_extracted_tiles_num:
            gscript.verbose(_("Found {removed} existing tiles older"
                              " than the corresponding downloaded archive").format(
                            removed=old_extracted_tiles_num
                            ))
        if removed_extracted_tiles_num:
            gscript.verbose(_("Removed {removed} existing tiles").format(
                            removed=removed_extracted_tiles_num
                            ))

    if gui_product == 'lidar' and not has_pdal:
        gscript.fatal(_("Module v.in.pdal is missing,"
                        " cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is the naming of the maps (it is a smaller
    # assumption for files in a dedicated download directory).
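    # For illustration only: map_exists() is a helper defined elsewhere in
    # this script; a minimal sketch (an assumption, not the original code)
    # could wrap gscript.find_file, e.g.:
    #   def map_exists(element, name, mapset):
    #       return bool(gscript.find_file(name, element=element,
    #                                     mapset=mapset)["name"])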
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists("raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _("Importing and reprojecting {name}"
                            " ({count} out of {total})...").format(
                                name=LT_file_name, count=i + 1, total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != 'lidar':
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_file_import, kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            resolution='value', resolution_value=product_resolution,
                            extent="region", resample=product_interpolation,
                            memory=memory
                        ))
                else:
                    srs = options['input_srs']
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i, LT_layer_name),
                        target=run_lidar_import, kwargs=dict(
                            identifier=i, results=results,
                            input=t, output=LT_layer_name,
                            input_srs=srs if srs else None
                        ))
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
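            # (Worked example: with nprocs=4 and 10 tiles, none skipped,
            # joins happen after tiles 4 and 8 when process_count reaches
            # nprocs, and after the last tile for the remaining two.)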
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(_("Parallel import and reprojection failed."
                                        " Try running with nprocs=1."))
                    else:
                        gscript.fatal(_("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count

    gscript.verbose(_("Imported {imported} new tiles and"
                      " used {used} existing tiles").format(
        used=used_existing_imported_tiles_num,
        imported=imported_tiles_num
    ))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
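    # (tension controls the stiffness of the spline surface, smooth the
    # degree of smoothing, npmin the minimum number of points used per
    # segment; see the v.surf.rst manual)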
    rst_params = dict(tension=25, smooth=0.1, npmin=100)

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [name + '.' + i for name in patch_names]
                        output = gui_output_layer + '.' + i
                        gscript.run_command('r.patch', input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == 'lidar':
                    gscript.run_command('v.patch', flags='nzb', input=patch_names,
                                        output=gui_output_layer)
                    gscript.run_command('v.surf.rst', input=gui_output_layer,
                                        elevation=gui_output_layer, nprocs=nprocs,
                                        **rst_params)
                else:
                    gscript.run_command('r.patch', input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added").format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if not -k flag
                if not preserve_imported_tiles:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [name + '.' + i for name in patch_names]
                            gscript.run_command('g.remove', type='raster',
                                                name=patch_names_i, flags='f')
                    elif gui_product == 'lidar':
                        gscript.run_command('g.remove', type='vector',
                                            name=patch_names + [gui_output_layer], flags='f')
                    else:
                        gscript.run_command('g.remove', type='raster',
                                            name=patch_names, flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename', raster=(patch_names[0] + '.' + i, gui_output_layer + '.' + i))
            elif gui_product == 'lidar':
                if product_resolution:
                    gscript.run_command('g.region', res=product_resolution, flags='a')
                gscript.run_command('v.surf.rst', input=patch_names[0],
                                    elevation=gui_output_layer, nprocs=nprocs,
                                    **rst_params)
                if not preserve_imported_tiles:
                    gscript.run_command('g.remove', type='vector',
                                        name=patch_names[0], flags='f')
            else:
                gscript.run_command('g.rename', raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(_("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(_(
            "Error in getting or importing the data (see above). Please retry."))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = ("<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors', map=gui_output_layer, color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite', red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2', blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
Exemplo n.º 42
def main(options, flags):

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    base = options["basename"]
    method = options["type"]
    nprocs = int(options["nprocs"])
    column = options["column"]
    time_suffix = options["suffix"]

    register_null = flags["n"]
    t_flag = flags["t"]
    s_flag = flags["s"]
    v_flag = flags["v"]
    b_flag = flags["b"]
    z_flag = flags["z"]
    
    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = gscript.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        gscript.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    # Check the new stvds
    new_sp = tgis.check_new_stds(output, "stvds", dbif=dbif,
                                 overwrite=overwrite)
                                               
    # Assemble the flag string for r.to.vect
    vect_flags = ""
    if t_flag:
        vect_flags += "t"
    if s_flag:
        vect_flags += "s"
    if v_flag:
        vect_flags += "v"
    if b_flag:
        vect_flags += "b"
    if z_flag:
        vect_flags += "z"
    
    # Configure the r.to.vect module
    to_vector_module = pymod.Module("r.to.vect", input="dummy",
                                    output="dummy", run_=False,
                                    finish_=False, flags=vect_flags,
                                    type=method, overwrite=overwrite,
                                    quiet=True)

    # The module queue for parallel execution; when attribute tables are
    # to be created, force single-process use
    if not t_flag and nprocs > 1:
        nprocs = 1
        gscript.warning(_("The number of parallel r.to.vect processes was "
                          "reduced to 1 because of the attribute table "
                          "creation"))
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
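    # Modules put() on the queue run asynchronously, at most nprocs at a
    # time; process_queue.wait() below blocks until all have finished.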

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.to.vect on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(map.temporal_extent.get_start_time(),
                                                      sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffic(base, count, time_suffix)
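        # e.g., a basename "veg" may yield names like "veg_2001_01" for a
        # granularity-based suffix or "veg_00005" for a numeric one
        # (illustrative values; the exact suffix depends on the dataset)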
        new_map = tgis.open_new_map_dataset(map_name, None, type="vector",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(to_vector_module)
        mod(input=map.get_id(), output=new_map.get_id())
        sys.stderr.write(mod.get_bash() + "\n")
        process_queue.put(mod)

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time vector dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "stvds", ttype, title,
                                descr, stype, dbif, overwrite)
    # collect empty maps to remove them
    num_maps = len(new_maps)
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            gscript.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_number_of_primitives() == 0:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    gscript.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        gscript.run_command("g.remove", flags='f', type='vector', name=names, 
                            quiet=True)

    dbif.close()
Exemplo n.º 43
def main():
    import matplotlib  # required by windows

    matplotlib.use("wxAGG")  # required by windows
    import matplotlib.pyplot as plt

    input = options["input"]
    output = None
    if options["csv_output"]:
        output = options["csv_output"]
    plot_output = None
    if options["plot_output"]:
        plot_output = options["plot_output"]
    min_cells = False
    if options["min_cells"]:
        min_cells = int(options["min_cells"])
    target_res = None
    if options["max_size"]:
        target_res = float(options["max_size"])
    step = float(options["step"])

    global temp_resamp_map, temp_variance_map
    temp_resamp_map = "temp_resamp_map_%d" % os.getpid()
    temp_variance_map = "temp_variance_map_%d" % os.getpid()
    resolutions = []
    variances = []

    region = gscript.parse_command("g.region", flags="g")
    cells = int(region["cells"])
    res = (float(region["nsres"]) + float(region["ewres"])) / 2
    north = float(region["n"])
    south = float(region["s"])
    west = float(region["w"])
    east = float(region["e"])

    if res % 1 == 0 and step % 1 == 0:
        template_string = "%d,%f\n"
    else:
        template_string = "%f,%f\n"

    if min_cells:
        target_res_cells = int(
            sqrt(((east - west) * (north - south)) / min_cells))
        # handle both an unset max_size (target_res is None) and a
        # max_size that exceeds the bound implied by min_cells
        if target_res is None or target_res > target_res_cells:
            target_res = target_res_cells
            gscript.message(
                _("Max resolution leads to fewer cells than defined by 'min_cells' (%d).")
                % min_cells)
            gscript.message(_("Max resolution reduced to %d") % target_res)

    nb_iterations = (target_res - res) / step
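    # e.g., res=10, step=10, max_size=100: (100 - 10) / 10 = 9 iterations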
    if nb_iterations < 3:
        message = _("Less than 3 iterations. Cannot determine maxima.\n")
        message += _("Please increase max_size or reduce min_cells.")
        gscript.fatal(_(message))

    gscript.use_temp_region()

    gscript.message(_("Calculating variance at different resolutions"))
    while res <= target_res:
        gscript.percent(res, target_res, step)
        gscript.run_command(
            "r.resamp.stats",
            input=input,
            output=temp_resamp_map,
            method="average",
            quiet=True,
            overwrite=True,
        )
        gscript.run_command(
            "r.neighbors",
            input=temp_resamp_map,
            method="variance",
            output=temp_variance_map,
            quiet=True,
            overwrite=True,
        )
        varianceinfo = gscript.parse_command("r.univar",
                                             map_=temp_variance_map,
                                             flags="g",
                                             quiet=True)
        resolutions.append(res)
        variances.append(float(varianceinfo["mean"]))
        res += step
        region = gscript.parse_command("g.region",
                                       res=res,
                                       n=north,
                                       s=south,
                                       w=west,
                                       e=east,
                                       flags="ag")
        cells = int(region["cells"])

    indices, differences = FindMaxima(variances)
    max_resolutions = [resolutions[x] for x in indices]
    gscript.message(_("resolution,min_diff"))
    for i in range(len(max_resolutions)):
        print("%g,%g" % (max_resolutions[i], differences[i]))

    if output:
        header = "resolution,variance\n"
        of = open(output, "w")
        of.write(header)
        for i in range(len(resolutions)):
            output_string = template_string % (resolutions[i], variances[i])
            of.write(output_string)
        of.close()

    if plot_output:
        plt.plot(resolutions, variances)
        plt.xlabel("Resolution")
        plt.ylabel("Variance")
        plt.grid(True)
        if plot_output == "-":
            plt.show()
        else:
            plt.savefig(plot_output)
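

# NOTE: FindMaxima() is used in main() above but defined elsewhere in the
# original script. A minimal compatible sketch (an assumption, not the
# original implementation): return the indices of local maxima and, for
# each, the smaller of the differences to its two neighbors.
def FindMaxima(values):
    indices = []
    differences = []
    for i in range(1, len(values) - 1):
        # a local maximum is strictly greater than both neighbors
        if values[i] > values[i - 1] and values[i] > values[i + 1]:
            indices.append(i)
            differences.append(min(values[i] - values[i - 1],
                                   values[i] - values[i + 1]))
    return indices, differences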
Exemplo n.º 44
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(_("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output, "strds", dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbor module
    neighbor_module = pymod.Module("r.neighbors", input="dummy",
                                   output="dummy", run_=False,
                                   finish_=False, size=int(size),
                                   method=method, overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        if sp.get_temporal_type() == 'absolute' and time_suffix == 'gran':
            suffix = tgis.create_suffix_from_datetime(map.temporal_extent.get_start_time(),
                                                      sp.get_granularity())
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        elif sp.get_temporal_type() == 'absolute' and time_suffix == 'time':
            suffix = tgis.create_time_suffix(map)
            map_name = "{ba}_{su}".format(ba=base, su=suffix)
        else:
            map_name = tgis.create_numeric_suffic(base, count, time_suffix)

        new_map = tgis.open_new_map_dataset(map_name, None, type="raster",
                                            temporal_extent=map.get_temporal_extent(),
                                            overwrite=overwrite, dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        print(mod.get_bash())
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title,
                                descr, stype, dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove", flags='f', type='raster', name=names, quiet=True)

    dbif.close()
Exemplo n.º 45
def main():

    # Get the options
    input = options["input"]
    output = options["output"]
    where = options["where"]
    size = options["size"]
    base = options["basename"]
    register_null = flags["n"]
    method = options["method"]
    nprocs = options["nprocs"]

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    overwrite = grass.overwrite()

    sp = tgis.open_old_stds(input, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, dbif=dbif)

    if not maps:
        dbif.close()
        grass.warning(
            _("Space time raster dataset <%s> is empty") % sp.get_id())
        return

    new_sp = tgis.check_new_stds(output,
                                 "strds",
                                 dbif=dbif,
                                 overwrite=overwrite)
    # Configure the r.neighbor module
    neighbor_module = pymod.Module("r.neighbors",
                                   input="dummy",
                                   output="dummy",
                                   run_=False,
                                   finish_=False,
                                   size=int(size),
                                   method=method,
                                   overwrite=overwrite,
                                   quiet=True)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    count = 0
    num_maps = len(maps)
    new_maps = []

    # run r.neighbors on all selected maps
    for map in maps:
        count += 1
        map_name = "%s_%i" % (base, count)
        new_map = tgis.open_new_map_dataset(
            map_name,
            None,
            type="raster",
            temporal_extent=map.get_temporal_extent(),
            overwrite=overwrite,
            dbif=dbif)
        new_maps.append(new_map)

        mod = copy.deepcopy(neighbor_module)
        mod(input=map.get_id(), output=new_map.get_id())
        print(mod.get_bash())
        process_queue.put(mod)

    # Wait for unfinished processes
    process_queue.wait()

    # Open the new space time raster dataset
    ttype, stype, title, descr = sp.get_initial_values()
    new_sp = tgis.open_new_stds(output, "strds", ttype, title, descr, stype,
                                dbif, overwrite)
    num_maps = len(new_maps)
    # collect empty maps to remove them
    empty_maps = []

    # Register the maps in the database
    count = 0
    for map in new_maps:
        count += 1

        if count % 10 == 0:
            grass.percent(count, num_maps, 1)

        # Do not register empty maps
        map.load()
        if map.metadata.get_min() is None and \
            map.metadata.get_max() is None:
            if not register_null:
                empty_maps.append(map)
                continue

        # Insert map in temporal database
        map.insert(dbif)
        new_sp.register_map(map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    new_sp.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    # Remove empty maps
    if len(empty_maps) > 0:
        names = ""
        count = 0
        for map in empty_maps:
            if count == 0:
                count += 1
                names += "%s" % (map.get_name())
            else:
                names += ",%s" % (map.get_name())

        grass.run_command("g.remove",
                          flags='f',
                          type='raster',
                          name=names,
                          quiet=True)

    dbif.close()
Exemplo n.º 46
def main():

    # Get the options
    name = options["input"]
    type = options["type"]
    title = options["title"]
    description = options["description"]
    semantic = options["semantictype"]
    update = flags["u"]
    map_update = flags["m"]

    # Make sure the temporal database exists
    tgis.init()

    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    stds = tgis.open_old_stds(name, type, dbif)

    # Update only the entries that were actually provided; use a separate
    # variable so the -u flag value stored in "update" is not clobbered
    metadata_modified = False
    if title:
        stds.metadata.set_title(title=title)
        metadata_modified = True
    if description:
        stds.metadata.set_description(description=description)
        metadata_modified = True
    if semantic:
        stds.base.set_semantic_type(semantic_type=semantic)
        metadata_modified = True

    if metadata_modified:
        stds.update(dbif=dbif)

    if map_update:
        # Update the registered maps from the GRASS spatial database
        statement = ""
        # This dict stores the datasets that must be updated
        dataset_dict = {}

        count = 0
        maps = stds.get_registered_maps_as_objects(dbif=dbif)

        # We collect the delete and update statements
        for map in maps:

            count += 1
            if count % 10 == 0:
                grass.percent(count, len(maps), 1)

            map.select(dbif=dbif)

            # Check if the map is present in the grass spatial database
            # Update if present, delete if not present
            if map.map_exists():
                # Read new metadata from the spatial database
                map.load()
                statement += map.update(dbif=dbif, execute=False)
            else:
                # Delete the map from the temporal database
                # We need to update all affected space time datasets
                datasets = map.get_registered_stds(dbif)
                if datasets:
                    for dataset in datasets:
                        dataset_dict[dataset] = dataset
                # Collect the delete statements
                statement += map.delete(dbif=dbif, update=False, execute=False)

        # Execute the collected SQL statements
        dbif.execute_transaction(statement)
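        # Batching all statements into a single transaction avoids one
        # commit per map, which would be considerably slower for large
        # datasets.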

        # Update the affected space time datasets
        for id in dataset_dict:
            stds_new = stds.get_new_instance(id)
            stds_new.select(dbif=dbif)
            stds_new.update_from_registered_maps(dbif=dbif)

    if map_update or update or metadata_modified:
        stds.update_from_registered_maps(dbif=dbif)

    stds.update_command_string(dbif=dbif)

    dbif.close()