Code example #1
0
File: test.py  Project: ruegdeg/r.learn.ml
def worker(cmd):
    """Read one raster window in a worker process.

    Parameters
    ----------
    cmd : tuple
        ``(window, src)`` where *window* is convertible via ``dict()`` to a
        mapping with keys ``'north'``, ``'south'``, ``'west'``, ``'east'``
        and *src* is the name of the source raster map.

    Returns
    -------
    numpy.ndarray or None
        The raster values inside the window, or ``None`` if reading failed.
        (The original returned an unbound ``arr`` on failure, raising
        ``NameError`` instead of reporting the error.)
    """
    window, src = cmd

    reg = Region()
    old_reg = deepcopy(reg)
    arr = None  # defined return value even if the read below fails

    try:
        # update region to the requested window (convert once, not per key)
        bounds = dict(window)
        reg.north = bounds['north']
        reg.south = bounds['south']
        reg.west = bounds['west']
        reg.east = bounds['east']
        reg.set_current()
        reg.write()
        reg.set_raster_region()

        # read raster data
        with RasterRow(src) as rs:
            arr = np.asarray(rs)
    except Exception:
        # best-effort read: a failed window yields None instead of crashing
        # the worker pool
        pass
    finally:
        # reset region
        old_reg.write()
        reg.set_raster_region()

    return arr
Code example #2
0
 def setRegion(parcelmap, betriebid):
     """Set the computational region to the parcel layer extent plus a
     100 m buffer and save it under the name 'B<betriebid>'."""
     buffer_m = 100
     reg = Region()
     reg.vect(parcelmap.name)
     # grow the extent by the buffer on all four sides
     reg.north = reg.north + buffer_m
     reg.east = reg.east + buffer_m
     reg.south = reg.south - buffer_m
     reg.west = reg.west - buffer_m
     reg.set_current()
     # set_current() not working right now
     # so using g.region() :
     g.region(n=str(reg.north), s=str(reg.south), w=str(reg.west), e=str(reg.east), res='2', flags='a', quiet=quiet)
     g.region(save='B' + betriebid, overwrite=True, quiet=quiet)
Code example #3
0
#!/usr/bin/env python3

import numpy as np
from grass.pygrass.raster import RasterRow
from grass.pygrass.modules import Module

from grass.pygrass.gis.region import Region

# Name of the input raster (DTM in the PERMANENT mapset)
name = 'dmt@PERMANENT'

# Align the computational region with the raster before reading it
reg = Region()
reg.from_rast(name)
reg.set_current()

rast = RasterRow(name)
rast.open('r')

# Scan all cells for min/max and count valid vs. NaN (null) cells.
# val_min/val_max avoid shadowing the built-in min()/max(); the original
# also never incremented 'count' and never closed the raster handle.
val_min = val_max = None
count = ncount = 0
try:
    for row in rast:
        for value in row:
            if np.isnan(value):
                ncount += 1
            else:
                count += 1
                if val_min is None:
                    val_min = val_max = value
                elif val_min > value:
                    val_min = value
                elif val_max < value:
                    val_max = value
finally:
    rast.close()  # release the raster handle even if iteration fails
Code example #4
0
def main():
    """Compute a parametrised, weighted cumulative viewshed (visual
    exposure index) from a DSM.

    All inputs come from the module-level ``options``/``flags``
    dictionaries filled by the GRASS parser.  The exposure source is
    sampled into target points, a partial viewshed is computed around
    each point (parallelised over ``nprocs`` cores), the parametrised
    partial viewsheds are summed, and the result is written to the
    output raster.

    NOTE(review): depends on globals/helpers defined elsewhere in this
    file (``TEMPNAME``, ``unset_mask``, ``sample_raster_with_points``,
    ``do_it_all``, the ``*_reverse`` parametrisation functions,
    ``binary``, ``cleanup``) -- confirm against the full module.
    """

    # set numpy printing options
    np.set_printoptions(formatter={"float": lambda x: "{0:0.2f}".format(x)})

    # ==========================================================================
    # Input data
    # ==========================================================================
    # Required
    r_output = options["output"]
    r_dsm = options["input"]
    dsm_type = grass.parse_command("r.info", map=r_dsm, flags="g")["datatype"]

    # Test if DSM exist
    gfile_dsm = grass.find_file(name=r_dsm, element="cell")
    if not gfile_dsm["file"]:
        grass.fatal("Raster map <{}> not found".format(r_dsm))

    # Exposure settings
    v_source = options["sampling_points"]
    r_source = options["source"]
    source_cat = options["sourcecat"]
    r_weights = options["weights"]

    # test if source vector map exist and contains points
    if v_source:
        gfile_vsource = grass.find_file(name=v_source, element="vector")
        if not gfile_vsource["file"]:
            grass.fatal("Vector map <{}> not found".format(v_source))
        if not grass.vector.vector_info_topo(v_source, layer=1)["points"] > 0:
            grass.fatal("Vector map <{}> does not contain any points.".format(
                v_source))

    if r_source:
        gfile_rsource = grass.find_file(name=r_source, element="cell")
        if not gfile_rsource["file"]:
            grass.fatal("Raster map <{}> not found".format(r_source))

        # if source_cat is set, check that r_source is CELL
        source_datatype = grass.parse_command("r.info",
                                              map=r_source,
                                              flags="g")["datatype"]

        if source_cat != "*" and source_datatype != "CELL":
            grass.fatal(
                "The raster map <%s> must be integer (CELL type) in order to \
                use the 'sourcecat' parameter" % r_source)

    if r_weights:
        gfile_weights = grass.find_file(name=r_weights, element="cell")
        if not gfile_weights["file"]:
            grass.fatal("Raster map <{}> not found".format(r_weights))

    # Viewshed settings
    range_inp = float(options["range"])
    v_elevation = float(options["observer_elevation"])
    b_1 = float(options["b1_distance"])
    pfunction = options["function"]
    refr_coeff = float(options["refraction_coeff"])
    flagstring = ""
    if flags["r"]:
        flagstring += "r"
    if flags["c"]:
        flagstring += "c"

    # test values
    if v_elevation < 0.0:
        grass.fatal("Observer elevation must be larger than or equal to 0.0.")

    # range_inp == -1 is the sentinel for "infinity" and is handled below
    if range_inp <= 0.0 and range_inp != -1:
        grass.fatal("Exposure range must be larger than 0.0.")

    if pfunction == "Fuzzy_viewshed" and range_inp == -1:
        grass.fatal("Exposure range cannot be \
            infinity for fuzzy viewshed approch.")

    if pfunction == "Fuzzy_viewshed" and b_1 > range_inp:
        grass.fatal("Exposure range must be larger than radius around \
            the viewpoint where clarity is perfect.")

    # Sampling settings
    source_sample_density = float(options["sample_density"])
    seed = options["seed"]

    if not seed:  # if seed is not set, set it to process number
        seed = os.getpid()

    # Optional
    cores = int(options["nprocs"])
    memory = int(options["memory"])

    # ==========================================================================
    # Region settings
    # ==========================================================================
    # check that location is not in lat/long
    if grass.locn_is_latlong():
        grass.fatal("The analysis is not available for lat/long coordinates.")

    # get comp. region parameters
    reg = Region()

    # check that NSRES equals EWRES (tolerance for float comparison)
    if abs(reg.ewres - reg.nsres) > 1e-6:
        grass.fatal("Variable north-south and east-west 2D grid resolution \
            is not supported")

    # adjust exposure range as a multiplicate of region resolution
    # if infinite, set exposure range to the max of region size
    if range_inp != -1:
        multiplicate = math.floor(range_inp / reg.nsres)
        exp_range = multiplicate * reg.nsres
    else:
        range_inf = max(reg.north - reg.south, reg.east - reg.west)
        multiplicate = math.floor(range_inf / reg.nsres)
        exp_range = multiplicate * reg.nsres

    # an active MASK would bias the analysis; rename it temporarily
    # (presumably restored by cleanup() at the end -- TODO confirm)
    if RasterRow("MASK", Mapset().name).exist():
        grass.warning("Current MASK is temporarily renamed.")
        unset_mask()

    # ==========================================================================
    # Random sample exposure source with target points T
    # ==========================================================================
    if v_source:
        # go for using input vector map as sampling points
        v_source_sample = v_source
        grass.verbose("Using sampling points from input vector map")

    else:
        # go for sampling

        # min. distance between samples set to half of region resolution
        # (issue in r.random.cells)
        sample_distance = reg.nsres / 2
        v_source_sample = sample_raster_with_points(
            r_source,
            source_cat,
            source_sample_density,
            sample_distance,
            "{}_rand_pts_vect".format(TEMPNAME),
            seed,
        )

    # ==========================================================================
    # Get coordinates and attributes of target points T
    # ==========================================================================
    # Prepare a list of maps to extract attributes from
    # DSM values
    attr_map_list = [r_dsm]

    if pfunction in ["Solid_angle", "Visual_magnitude"]:
        grass.verbose("Precomputing parameter maps...")

    # Precompute values A, B, C, D for solid angle function
    # using moving window [row, col]
    if pfunction == "Solid_angle":
        r_a_z = "{}_A_z".format(TEMPNAME)
        r_b_z = "{}_B_z".format(TEMPNAME)
        r_c_z = "{}_C_z".format(TEMPNAME)
        r_d_z = "{}_D_z".format(TEMPNAME)

        # each output is the mean elevation of a 2x2 cell corner neighbourhood
        expr = ";".join([
            "$outmap_A = ($inmap[0, 0] + \
                          $inmap[0, -1] + \
                          $inmap[1, -1] + \
                          $inmap[1, 0]) / 4",
            "$outmap_B = ($inmap[-1, 0] + \
                          $inmap[-1, -1] + \
                          $inmap[0, -1] + \
                          $inmap[0, 0]) / 4",
            "$outmap_C = ($inmap[-1, 1] + \
                          $inmap[-1, 0] + \
                          $inmap[0, 0] + \
                          $inmap[0, 1]) / 4",
            "$outmap_D = ($inmap[0, 1] + \
                          $inmap[0, 0] + \
                          $inmap[1, 0] + \
                          $inmap[1, 1]) / 4",
        ])
        grass.mapcalc(
            expr,
            inmap=r_dsm,
            outmap_A=r_a_z,
            outmap_B=r_b_z,
            outmap_C=r_c_z,
            outmap_D=r_d_z,
            overwrite=True,
            quiet=grass.verbosity() <= 1,
        )

        attr_map_list.extend([r_a_z, r_b_z, r_c_z, r_d_z])

    # Precompute values slopes in e-w direction, n-s direction
    # as atan(dz/dx) (e-w direction), atan(dz/dy) (n-s direction)
    # using moving window [row, col]
    elif pfunction == "Visual_magnitude":

        r_slope_ew = "{}_slope_ew".format(TEMPNAME)
        r_slope_ns = "{}_slope_ns".format(TEMPNAME)

        # Sobel-style weighted differences divided by 8 * cell size
        expr = ";".join([
            "$outmap_ew = atan((sqrt(2) * $inmap[-1, 1] + \
                          2 * $inmap[0, 1] + \
                          sqrt(2) * $inmap[1, 1] - \
                          sqrt(2) * $inmap[-1, -1] - \
                          2 * $inmap[0, -1] - \
                          sqrt(2) * $inmap[1, -1]) / \
                          (8 * $w_ew))",
            "$outmap_ns = atan((sqrt(2) * $inmap[-1, -1] + \
                          2 * $inmap[-1, 0] + \
                          sqrt(2) * $inmap[-1, 1] - \
                          sqrt(2) * $inmap[1, -1] - \
                          2 * $inmap[1, 0] - \
                          sqrt(2) * $inmap[1, 1]) / \
                          (8 * $w_ns))",
        ])

        grass.mapcalc(
            expr,
            inmap=r_dsm,
            outmap_ew=r_slope_ew,
            outmap_ns=r_slope_ns,
            w_ew=reg.ewres,
            w_ns=reg.nsres,
            overwrite=True,
            quiet=grass.verbosity() <= 1,
        )

        attr_map_list.extend([r_slope_ew, r_slope_ns])

    # Use viewshed weights if provided
    if r_weights:
        attr_map_list.append(r_weights)

    # Extract attribute values at the sampling points
    target_pts_grass = grass.read_command(
        "r.what",
        flags="v",
        map=attr_map_list,
        points=v_source_sample,
        separator="|",
        null_value="*",
        quiet=True,
    )

    # columns to use depending on parametrization function
    usecols = list(range(0, 4 + len(attr_map_list)))
    usecols.remove(3)  # skip 3rd column - site_name

    # convert coordinates and attributes of target points T to numpy array
    target_pts_np = txt2numpy(
        target_pts_grass,
        sep="|",
        names=None,
        null_value="*",
        usecols=usecols,
        structured=False,
    )

    # if one point only - 0D array which cannot be used in iteration
    if target_pts_np.ndim == 1:
        target_pts_np = target_pts_np.reshape(1, -1)

    # drop points with any missing attribute value
    target_pts_np = target_pts_np[~np.isnan(target_pts_np).any(axis=1)]

    no_points = target_pts_np.shape[0]

    # if viewshed weights not set by flag - set weight to 1 for all pts
    if not r_weights:
        weights_np = np.ones((no_points, 1))
        target_pts_np = np.hstack((target_pts_np, weights_np))

    grass.debug("target_pts_np: {}".format(target_pts_np))

    # ==========================================================================
    # Calculate weighted parametrised cummulative viewshed
    # by iterating over target points T
    # ==========================================================================
    grass.verbose("Calculating partial viewsheds...")

    # Parametrisation function
    if pfunction == "Solid_angle":
        parametrise_viewshed = solid_angle_reverse

    elif pfunction == "Distance_decay":
        parametrise_viewshed = distance_decay_reverse

    elif pfunction == "Fuzzy_viewshed":
        parametrise_viewshed = fuzzy_viewshed_reverse

    elif pfunction == "Visual_magnitude":
        parametrise_viewshed = visual_magnitude_reverse

    else:
        parametrise_viewshed = binary

    # Collect variables that will be used in do_it_all() into a dictionary
    global_vars = {
        "region": reg,
        "range": exp_range,
        "param_viewshed": parametrise_viewshed,
        "observer_elevation": v_elevation,
        "b_1": b_1,
        "memory": memory,
        "refr_coeff": refr_coeff,
        "flagstring": flagstring,
        "r_dsm": r_dsm,
        "dsm_type": dsm_type,
        "cores": cores,
        "tempname": TEMPNAME,
    }

    # Split target points to chunks for each core
    target_pnts = np.array_split(target_pts_np, cores)

    # Combine each chunk with dictionary
    combo = list(zip(itertools.repeat(global_vars), target_pnts))

    # Calculate partial cummulative viewshed: fan the chunks out across
    # worker processes; each returns a partial sum array
    with Pool(cores) as pool:
        np_sum = pool.starmap(do_it_all, combo)
        pool.close()
        pool.join()

    # nansum treats NaN as 0; cells that are NaN in *all* partial
    # viewsheds are restored to NaN afterwards
    all_nan = np.all(np.isnan(np_sum), axis=0)
    np_sum = np.nansum(np_sum, axis=0, dtype=np.single)
    np_sum[all_nan] = np.nan

    grass.verbose("Writing final result and cleaning up...")

    # Restore original computational region
    reg.read()
    reg.set_current()
    reg.set_raster_region()

    # Convert numpy array of cummulative viewshed to raster
    numpy2raster(np_sum, mtype="FCELL", rastname=r_output, overwrite=True)

    # Remove temporary files and reset mask if needed
    cleanup()

    # Set raster history to output raster
    grass.raster_history(r_output, overwrite=True)
    grass.run_command(
        "r.support",
        overwrite=True,
        map=r_output,
        title="Visual exposure index as {}".format(pfunction.replace("_",
                                                                     " ")),
        description="generated by r.viewshed.exposure",
        units="Index value",
        quiet=True,
    )
Code example #5
0
def main():
    """Extract raster statistics (univariate or category tabulation)
    within buffered vector geometries and write them to the vector's
    attribute table, to stdout, or to a file.

    All inputs come from the module-level ``options``/``flags``
    dictionaries filled by the GRASS parser.

    Fixes over the original:
    - Python-2-only ``unicode()`` replaced with ``str()``.
    - ``percentile`` materialised as a list (a ``map`` iterator would be
      exhausted after its first use but is iterated several times).
    - geometry iterators are chained for *all* requested types instead of
      keeping only the last one.
    - output formatting used the builtin ``buffer`` instead of the loop
      variable ``buf``, and a stale ``p`` instead of ``prefix``.
    - percentile column types used the stale method variable ``m``.
    """
    in_vector = options['input'].split('@')[0]
    if len(options['input'].split('@')) > 1:
        in_mapset = options['input'].split('@')[1]
    else:
        in_mapset = None
    raster_maps = options['raster'].split(
        ',')  # raster file(s) to extract from
    output = options['output']
    methods = tuple(options['methods'].split(','))
    # materialise as a list: 'percentile' is iterated several times below
    percentile = None if options['percentile'] == '' else [
        float(perc) for perc in options['percentile'].split(',')]
    column_prefix = tuple(options['column_prefix'].split(','))
    types = options['type'].split(',')
    layer = options['layer']
    sep = options['separator']
    update = flags['u']
    tabulate = flags['t']

    # Do checks using pygrass
    for rmap in raster_maps:
        r_map = RasterAbstractBase(rmap)
        if not r_map.exist():
            grass.fatal('Could not find raster map {}.'.format(rmap))

    m_map = RasterAbstractBase('MASK')
    if m_map.exist():
        grass.fatal("Please remove MASK first")

    invect = VectorTopo(in_vector)
    if not invect.exist():
        grass.fatal("Vector file {} does not exist".format(in_vector))

    # '-' means stdout, anything else is an output file
    if output:
        if output == '-':
            out = None
        else:
            out = open(output, 'w')

    # Check if input map is in current mapset (and thus editable)
    if in_mapset and str(in_mapset) != str(Mapset()):
        grass.fatal(
            "Input vector map is not in current mapset and cannot be modified. \
                    Please consider copying it to current mapset.")

    # Parse buffer distances; integral values are stored as int so that
    # generated column names read e.g. 'b100' rather than 'b100_0'
    buffers = []
    for buf in options['buffers'].split(','):
        try:
            b = float(buf)
            if b.is_integer():
                buffers.append(int(b))
            else:
                buffers.append(b)
        except ValueError:
            grass.fatal('Invalid buffer distance <{}>'.format(buf))
        if b < 0:
            grass.fatal("Negative buffer distance not supported!")

    ### Define column types depending on statistic, map type and
    ### DB backend (SQLite supports only double and not real)
    # int: statistic always produces integer precision
    # double: statistic always produces floating point precision
    # map_type: precision of statistic depends on map type
    int_dict = {
        'number': (0, 'int', 'n'),
        'number_null': (1, 'int', 'null_cells'),
        'minimum': (3, 'map_type', 'min'),
        'maximum': (4, 'map_type', 'max'),
        'range': (5, 'map_type', 'range'),
        'average': (6, 'double', 'mean'),
        'average_abs': (7, 'double', 'mean_of_abs'),
        'stddev': (8, 'double', 'stddev'),
        'variance': (9, 'double', 'variance'),
        'coeff_var': (10, 'double', 'coeff_var'),
        'sum': (11, 'map_type', 'sum'),
        'first_quartile': (12, 'map_type', 'first_quartile'),
        'median': (13, 'map_type', 'median'),
        'third_quartile': (14, 'map_type', 'third_quartile'),
        'percentile': (15, 'map_type', 'percentile')
    }

    if len(raster_maps) != len(column_prefix):
        grass.fatal(
            'Number of maps and number of column prefixes has to be equal!')

    # Generate list of required column names and types
    col_names = []
    col_types = []
    for p in column_prefix:
        rmaptype, rcats = raster_type(raster_maps[column_prefix.index(p)])
        for b in buffers:
            b_str = str(b).replace('.', '_')
            if tabulate:
                if rmaptype == 'double precision':
                    grass.fatal(
                        '{} has floating point precision. Can only tabulate integer maps'
                        .format(raster_maps[column_prefix.index(p)]))
                col_names.append('{}_{}_b{}'.format(p, 'ncats', b_str))
                col_types.append('int')
                col_names.append('{}_{}_b{}'.format(p, 'mode', b_str))
                col_types.append('int')
                col_names.append('{}_{}_b{}'.format(p, 'null', b_str))
                col_types.append('double precision')
                col_names.append('{}_{}_b{}'.format(p, 'area_tot', b_str))
                col_types.append('double precision')
                for rcat in rcats:
                    col_names.append('{}_{}_b{}'.format(
                        p,
                        rcat.split('\t')[0], b_str))
                    col_types.append('double precision')
            else:
                for m in methods:
                    col_names.append('{}_{}_b{}'.format(
                        p, int_dict[m][2], b_str))
                    col_types.append(rmaptype if int_dict[m][1] ==
                                     'map_type' else int_dict[m][1])
                if percentile:
                    for perc in percentile:
                        col_names.append('{}_percentile_{}_b{}'.format(
                            p,
                            int(perc) if (perc).is_integer() else perc, b_str))
                        # use the 'percentile' entry, not the stale method 'm'
                        col_types.append(
                            rmaptype if int_dict['percentile'][1] ==
                            'map_type' else int_dict['percentile'][1])

    # Open input vector map
    in_vect = VectorTopo(in_vector, layer=layer)
    in_vect.open(mode='r')

    # Get name for temporary map
    tmp_map = random_name(21)
    TMP_MAPS.append(tmp_map)

    # Setup stats collectors
    if tabulate:
        # Collector for raster category statistics
        stats = Module('r.stats', run_=False, stdout_=PIPE)
        stats.inputs.sort = 'desc'
        stats.inputs.null_value = 'null'
        stats.flags.a = True
        stats.flags.quiet = True
    else:
        # Collector for univariate statistics
        univar = Module('r.univar', run_=False, stdout_=PIPE)
        univar.inputs.separator = sep
        univar.flags.g = True
        univar.flags.quiet = True

        # Add extended statistics if requested
        if set(methods).intersection(
                set(['first_quartile', 'median', 'third_quartile'])):
            univar.flags.e = True

        if percentile is not None:
            univar.flags.e = True
            univar.inputs.percentile = percentile

    # Check if attribute table exists
    if not output:
        if not in_vect.table:
            grass.fatal(
                'No attribute table found for vector map {}'.format(in_vect))

        # Modify table as needed
        tab = in_vect.table
        tab_name = tab.name
        tab_cols = tab.columns

        # Add required columns
        existing_cols = list(set(tab_cols.names()).intersection(col_names))
        if len(existing_cols) > 0:
            if not update:
                grass.fatal(
                    'Column(s) {} already exist! Please use the u-flag \
                            if you want to update values in those columns'.
                    format(','.join(existing_cols)))
            else:
                grass.warning('Column(s) {} already exist!'.format(
                    ','.join(existing_cols)))
        for e in existing_cols:
            idx = col_names.index(e)
            del col_names[idx]
            del col_types[idx]
        tab_cols.add(col_names, col_types)

        conn = tab.conn
        cur = conn.cursor()

        sql_str_start = 'UPDATE {} SET '.format(tab_name)

    # Get computational region
    r = Region()

    # Create iterator for geometries of all selected types
    geoms = chain()
    geoms_n = 0
    n_geom = 1
    for geom_type in types:
        geoms_n += in_vect.number_of(geom_type)
        if in_vect.number_of(geom_type) > 0:
            # chain onto the existing iterator so that ALL requested
            # geometry types are processed, not only the last one
            geoms = chain(geoms, in_vect.viter(geom_type))

    # Loop over geometries
    for geom in geoms:
        # Get cat
        cat = geom.cat
        # Give progress information
        grass.percent(n_geom, geoms_n, 1)
        n_geom = n_geom + 1

        # Add where clause to UPDATE statement
        sql_str_end = ' WHERE cat = {};'.format(cat)

        # Loop over user-provided buffer distances
        for buf in buffers:
            b_str = str(buf).replace('.', '_')
            # Buffer geometry
            if buf <= 0:
                buffer_geom = geom
            else:
                buffer_geom = geom.buffer(buf)
            # Create temporary vector map with buffered geometry
            tmp_vect = VectorTopo(tmp_map, quiet=True)
            tmp_vect.open(mode='w')
            tmp_vect.write(Boundary(points=buffer_geom[0].to_list()))
            tmp_vect.write(Centroid(x=buffer_geom[1].x, y=buffer_geom[1].y),
                           cat=int(cat))

            # NOTE(review): VectorTopo writes verbose output; no clean way
            # to silence it was found, hence v.build is run quietly instead
            tmp_vect.close(build=False)
            grass.run_command('v.build', map=tmp_map, quiet=True)

            # Align computational region to the buffered geometry's bbox
            r = align_current(r, buffer_geom[0].bbox())
            r.set_current()

            # Create a MASK from buffered geometry
            grass.run_command('v.to.rast',
                              input=tmp_map,
                              output='MASK',
                              use='val',
                              value=int(cat),
                              quiet=True)

            updates = []
            # Compute statistics for every raster map
            for rm in range(len(raster_maps)):
                rmap = raster_maps[rm]
                prefix = column_prefix[rm]

                if tabulate:
                    # Get statistics on occurrence of raster categories within buffer
                    stats.inputs.input = rmap
                    stats.run()
                    t_stats = stats.outputs['stdout'].value.rstrip(
                        os.linesep).replace(
                            ' ', '_b{} = '.format(b_str)).split(os.linesep)

                    # Determine the mode (most frequent non-null category)
                    if t_stats[0].split('_b{} = '.format(b_str))[0].split(
                            '_')[-1] != 'null':
                        mode = t_stats[0].split(
                            '_b{} = '.format(b_str))[0].split('_')[-1]
                    elif len(t_stats) == 1:
                        mode = 'NULL'
                    else:
                        mode = t_stats[1].split(
                            '_b{} = '.format(b_str))[0].split('_')[-1]

                    if not output:
                        updates.append('\t{}_{}_b{} = {}'.format(
                            prefix, 'ncats', b_str, len(t_stats)))
                        updates.append('\t{}_{}_b{} = {}'.format(
                            prefix, 'mode', b_str, mode))

                        area_tot = 0
                        for l in t_stats:
                            updates.append('\t{}_{}'.format(prefix, l))
                            if l.split('_b{} ='.format(b_str))[0].split(
                                    '_')[-1] != 'null':
                                area_tot = area_tot + float(l.split('= ')[1])

                        updates.append('\t{}_{}_b{} = {}'.format(
                            prefix, 'area_tot', b_str, area_tot))

                    else:
                        # 'buf' (not the builtin 'buffer') goes into output
                        out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(
                            sep, cat, prefix, buf, 'ncats', len(t_stats),
                            os.linesep)
                        out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(
                            sep, cat, prefix, buf, 'mode', mode, os.linesep)
                        area_tot = 0
                        for l in t_stats:
                            rcat = l.split(
                                '_b{} ='.format(b_str))[0].split('_')[-1]
                            area = l.split('= ')[1]
                            out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(
                                sep, cat, prefix, buf,
                                'area {}'.format(rcat), area, os.linesep)
                            if rcat != 'null':
                                area_tot = area_tot + float(l.split('= ')[1])
                        out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(
                            sep, cat, prefix, buf, 'area_tot', area_tot,
                            os.linesep)

                        if output == '-':
                            print(out_str.rstrip(os.linesep))
                        else:
                            out.write(out_str)

                else:
                    # Get univariate statistics within buffer
                    univar.inputs.map = rmap
                    univar.run()
                    u_stats = univar.outputs['stdout'].value.rstrip(
                        os.linesep).replace(
                            '=', '_b{} = '.format(b_str)).split(os.linesep)

                    # Test if u_stats is empty and give warning
                    if (percentile and len(u_stats) <= 14) or (
                            univar.flags.e
                            and len(u_stats) <= 13) or len(u_stats) <= 12:
                        grass.warning(
                            'No data within buffer {} around geometry {}'.
                            format(buf, cat))
                        break

                    # Extract statistics for selected methods
                    for m in methods:
                        if not output:
                            # Add to list of UPDATE statements
                            updates.append('\t{}_{}'.format(
                                prefix, u_stats[int_dict[m][0]]))
                        else:
                            out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}'.format(
                                sep, cat, prefix, buf, m,
                                u_stats[int_dict[m][0]].split('= ')[1])
                            if output == '-':
                                print(out_str)
                            else:
                                out.write(out_str)

                    if percentile:
                        perc_count = 0
                        for perc in percentile:
                            if not output:
                                # 'prefix' (not the stale loop var 'p')
                                updates.append(
                                    '{}_percentile_{}_b{} = {}'.format(
                                        prefix,
                                        int(perc) if
                                        (perc).is_integer() else perc, b_str,
                                        u_stats[15 +
                                                perc_count].split('= ')[1]))
                            else:
                                out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}'.format(
                                    sep, cat, prefix, buf,
                                    'percentile_{}'.format(
                                        int(perc) if (
                                            perc).is_integer() else perc),
                                    u_stats[15 + perc_count].split('= ')[1])
                                if output == '-':
                                    print(out_str)
                                else:
                                    out.write(out_str)
                            perc_count = perc_count + 1

            if not output and len(updates) > 0:
                cur.execute('{}{}{}'.format(sql_str_start, ',\n'.join(updates),
                                            sql_str_end))

            # Remove temporary maps
            grass.run_command('g.remove',
                              flags='f',
                              type='raster',
                              name='MASK',
                              quiet=True)
            grass.run_command('g.remove',
                              flags='f',
                              type='vector',
                              name=tmp_map,
                              quiet=True)

        if not output:
            conn.commit()

    # Close cursor and DB connection
    if not output:
        cur.close()
        conn.close()
        # Update history
        grass.vector.vector_history(in_vector)
    elif output != "-":
        # write results to file
        out.close()

    # Clean up
    cleanup()