Example #1
def adjust_region(width, height):
    region = grass.region()

    mapwidth = abs(region["e"] - region["w"])
    mapheight = abs(region["n"] - region["s"])

    region["nsres"] = mapheight / height
    region["ewres"] = mapwidth / width
    region["rows"] = int(round(mapheight / region["nsres"]))
    region["cols"] = int(round(mapwidth / region["ewres"]))
    region["cells"] = region["rows"] * region["cols"]

    kwdata = [
        ("proj", "projection"),
        ("zone", "zone"),
        ("north", "n"),
        ("south", "s"),
        ("east", "e"),
        ("west", "w"),
        ("cols", "cols"),
        ("rows", "rows"),
        ("e-w resol", "ewres"),
        ("n-s resol", "nsres"),
    ]

    grass_region = ""
    for wkey, rkey in kwdata:
        grass_region += "%s: %s;" % (wkey, region[rkey])

    os.environ["GRASS_REGION"] = grass_region
Example #2
def adjust_region(width, height):
    region = grass.region()
    
    mapwidth  = abs(region["e"] - region["w"])
    mapheight = abs(region['n'] - region['s'])
    
    region["nsres"] =  mapheight / height
    region["ewres"] =  mapwidth  / width
    region['rows']  = int(round(mapheight / region["nsres"]))
    region['cols']  = int(round(mapwidth / region["ewres"]))
    region['cells'] = region['rows'] * region['cols']
    
    kwdata = [('proj',      'projection'),
              ('zone',      'zone'),
              ('north',     'n'),
              ('south',     's'),
              ('east',      'e'),
              ('west',      'w'),
              ('cols',      'cols'),
              ('rows',      'rows'),
              ('e-w resol', 'ewres'),
              ('n-s resol', 'nsres')]
    
    grass_region = ''
    for wkey, rkey in kwdata:
        grass_region += '%s: %s;' % (wkey, region[rkey])
    
    os.environ['GRASS_REGION'] = grass_region
Example #3
    def CreateDatalist(self, raster, coords):
        """!Build a list of distance, value pairs for points along transect using r.profile
        """
        datalist = []
        
        # keep total number of transect points to 500 or less to avoid 
        # freezing with large, high resolution maps
        region = grass.region()
        curr_res = min(float(region['nsres']),float(region['ewres']))
        transect_rec = 0
        if self.transect_length / curr_res > 500:
            transect_res = self.transect_length / 500
        else: transect_res = curr_res
        
        ret = gcmd.RunCommand("r.profile",
                              parent = self,
                              input = raster,
                              profile = coords,
                              res = transect_res,
                              null = "nan",
                              quiet = True,
                              read = True)
        
        if not ret:
            return []
            
        for line in ret.splitlines():
            dist, elev = line.strip().split(' ')
            if elev != 'nan':
                datalist.append((dist,elev))

        return datalist
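The point-count cap buried in CreateDatalist is easy to miss inside the r.profile call: it coarsens the sampling step so that a transect never yields more than 500 points. A standalone sketch of just that rule, with an illustrative function name and made-up numbers:

def transect_resolution(transect_length, nsres, ewres, max_points=500):
    # keep transect_length / step at or below max_points, as in the example above
    curr_res = min(float(nsres), float(ewres))
    if transect_length / curr_res > max_points:
        return transect_length / max_points
    return curr_res

print(transect_resolution(12000.0, nsres=10.0, ewres=10.0))  # 24.0 (capped)
print(transect_resolution(2000.0, nsres=10.0, ewres=10.0))   # 10.0 (unchanged)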
Example #4
    def _computeRegions(self,
                        count,
                        startRegion,
                        endRegion=None,
                        zoomValue=None):
        """Computes regions based on start region and end region or zoom value
        for each of the animation frames."""
        region = dict(gcore.region())  # cast to dict, otherwise deepcopy error
        if startRegion:
            region = dict(
                parse_key_val(
                    gcore.read_command("g.region",
                                       flags="gu",
                                       region=startRegion),
                    val_type=float,
                ))

        del region["cells"]
        del region["cols"]
        del region["rows"]
        if "projection" in region:
            del region["projection"]
        if "zone" in region:
            del region["zone"]
        regions = []
        for i in range(self._mapCount):
            regions.append(copy.copy(region))
        self._regions = regions
        if not (endRegion or zoomValue):
            return

        startRegionDict = parse_key_val(
            gcore.read_command("g.region", flags="gu", region=startRegion),
            val_type=float,
        )
        if endRegion:
            endRegionDict = parse_key_val(
                gcore.read_command("g.region", flags="gu", region=endRegion),
                val_type=float,
            )
            for key in ("n", "s", "e", "w", "nsres", "ewres"):
                values = interpolate(startRegionDict[key], endRegionDict[key],
                                     self._mapCount)
                for value, region in zip(values, regions):
                    region[key] = value

        elif zoomValue:
            for i in range(self._mapCount):
                regions[i]["n"] -= zoomValue[0] * i
                regions[i]["e"] -= zoomValue[1] * i
                regions[i]["s"] += zoomValue[0] * i
                regions[i]["w"] += zoomValue[1] * i

                # handle cases when north < south and similarly EW
                if (regions[i]["n"] < regions[i]["s"]
                        or regions[i]["e"] < regions[i]["w"]):
                    regions[i] = regions[i - 1]

        self._regions = regions
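The interpolate() helper called here (and in the other _computeRegions variants below) is not shown on this page. A plausible sketch, assuming it simply produces evenly spaced values from the start-region value to the end-region value, one per animation frame:

def interpolate(start, end, count):
    """Return `count` evenly spaced values from start to end (inclusive)."""
    if count == 1:
        return [start]
    step = (end - start) / float(count - 1)
    return [start + i * step for i in range(count)]

print(interpolate(100.0, 200.0, 5))  # [100.0, 125.0, 150.0, 175.0, 200.0]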
Example #5
def obtainAreaVector(outrast):
    """Create the string for configuration file"""
    reg = grass.region()
    return "MASKEDOVERLAYAREA {name}|{n}|{s}|{e}|{w}\n".format(name=outrast,
                                                               n=reg['n'],
                                                               s=reg['s'],
                                                               e=reg['e'],
                                                               w=reg['w'])
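For illustration, filling the template with made-up values (a hypothetical raster name and a hard-coded dict standing in for grass.region()) gives a line like this:

reg = {"n": 228500.0, "s": 215000.0, "e": 645000.0, "w": 630000.0}  # made-up region
print("MASKEDOVERLAYAREA {name}|{n}|{s}|{e}|{w}".format(
    name="my_mask", n=reg["n"], s=reg["s"], e=reg["e"], w=reg["w"]))
# MASKEDOVERLAYAREA my_mask|228500.0|215000.0|645000.0|630000.0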
Example #6
def obtainAreaVector(outrast):
    """Create the string for configuration file"""
    reg = grass.region()
    return "MASKEDOVERLAYAREA {name}|{n}|{s}|{e}|{w}\n".format(name=outrast,
                                                               n=reg['n'],
                                                               s=reg['s'],
                                                               e=reg['e'],
                                                               w=reg['w'])
Example #7
    def _computeRegions(self,
                        width,
                        height,
                        count,
                        startRegion,
                        endRegion=None,
                        zoomValue=None):
        """Computes regions based on start region and end region or zoom value
        for each of the animation frames."""
        currRegion = dict(
            gcore.region())  # cast to dict, otherwise deepcopy error
        del currRegion['cells']
        del currRegion['cols']
        del currRegion['rows']
        regions = []
        for i in range(self._mapCount):
            if endRegion or zoomValue:
                regions.append(copy.copy(currRegion))
            else:
                regions.append(None)
        if not startRegion:
            self._regions = regions
            return

        startRegionDict = parse_key_val(gcore.read_command('g.region',
                                                           flags='gu',
                                                           region=startRegion),
                                        val_type=float)
        if endRegion:
            endRegionDict = parse_key_val(gcore.read_command('g.region',
                                                             flags='gu',
                                                             region=endRegion),
                                          val_type=float)
            for key in ('n', 's', 'e', 'w'):
                values = interpolate(startRegionDict[key], endRegionDict[key],
                                     self._mapCount)
                for value, region in zip(values, regions):
                    region[key] = value

        elif zoomValue:
            for i in range(self._mapCount):
                regions[i]['n'] -= zoomValue[0] * i
                regions[i]['e'] -= zoomValue[1] * i
                regions[i]['s'] += zoomValue[0] * i
                regions[i]['w'] += zoomValue[1] * i

                # handle cases when north < south and similarly EW
                if regions[i]['n'] < regions[i]['s'] or \
                   regions[i]['e'] < regions[i]['w']:
                    regions[i] = regions[i - 1]

        for region in regions:
            mapwidth = abs(region['e'] - region['w'])
            mapheight = abs(region['n'] - region['s'])
            region['nsres'] = mapheight / height
            region['ewres'] = mapwidth / width

        self._regions = regions
Example #8
 def writeCircle(self, circle, rasterName):
     coords = self.mapWindow.Pixel2Cell(circle.point)
     RunCommand('r.circle', output=rasterName, max=circle.radius,
                coordinate=coords, flags="b")
     grass.use_temp_region()
     grass.run_command('g.region', zoom=rasterName)
     region = grass.region()
     marea = MaskedArea(region, rasterName, circle.radius)
     return marea
Example #9
 def writeCircle(self, circle, rasterName):
     coords = self.mapWindow.Pixel2Cell(circle.point)
     RunCommand('r.circle', output=rasterName, max=circle.radius,
                coordinate=coords, flags="b")
     grass.use_temp_region()
     grass.run_command('g.region', zoom=rasterName)
     region = grass.region()
     marea = MaskedArea(region, rasterName, circle.radius)
     return marea
Example #10
    def _computeRegions(self,
                        count,
                        startRegion,
                        endRegion=None,
                        zoomValue=None):
        """Computes regions based on start region and end region or zoom value
        for each of the animation frames."""
        region = dict(gcore.region())  # cast to dict, otherwise deepcopy error
        if startRegion:
            region = dict(
                parse_key_val(gcore.read_command('g.region',
                                                 flags='gu',
                                                 region=startRegion),
                              val_type=float))

        del region['cells']
        del region['cols']
        del region['rows']
        if 'projection' in region:
            del region['projection']
        if 'zone' in region:
            del region['zone']
        regions = []
        for i in range(self._mapCount):
            regions.append(copy.copy(region))
        self._regions = regions
        if not (endRegion or zoomValue):
            return

        startRegionDict = parse_key_val(gcore.read_command('g.region',
                                                           flags='gu',
                                                           region=startRegion),
                                        val_type=float)
        if endRegion:
            endRegionDict = parse_key_val(gcore.read_command('g.region',
                                                             flags='gu',
                                                             region=endRegion),
                                          val_type=float)
            for key in ('n', 's', 'e', 'w', 'nsres', 'ewres'):
                values = interpolate(startRegionDict[key], endRegionDict[key],
                                     self._mapCount)
                for value, region in zip(values, regions):
                    region[key] = value

        elif zoomValue:
            for i in range(self._mapCount):
                regions[i]['n'] -= zoomValue[0] * i
                regions[i]['e'] -= zoomValue[1] * i
                regions[i]['s'] += zoomValue[0] * i
                regions[i]['w'] += zoomValue[1] * i

                # handle cases when north < south and similarly EW
                if regions[i]['n'] < regions[i]['s'] or \
                   regions[i]['e'] < regions[i]['w']:
                    regions[i] = regions[i - 1]

        self._regions = regions
Example #11
    def writeArea(self, coords, rasterName):
        polyfile = tempfile.NamedTemporaryFile(delete=False)
        polyfile.write("AREA\n")
        for coor in coords:
            east, north = coor
            point = " %s %s\n" % (east, north)
            polyfile.write(point)

        catbuf = "=%d a\n" % self.catId
        polyfile.write(catbuf)
        self.catId = self.catId + 1

        polyfile.close()
        region_settings = grass.parse_command("g.region",
                                              flags="p",
                                              delimiter=":")
        pname = polyfile.name.split("/")[-1]
        tmpraster = "rast_" + pname
        tmpvector = "vect_" + pname
        wx.BeginBusyCursor()
        wx.GetApp().Yield()
        RunCommand(
            "r.in.poly",
            input=polyfile.name,
            output=tmpraster,
            rows=region_settings["rows"],
            overwrite=True,
        )

        RunCommand("r.to.vect",
                   input=tmpraster,
                   output=tmpvector,
                   type="area",
                   overwrite=True)

        RunCommand("v.to.rast",
                   input=tmpvector,
                   output=rasterName,
                   value=1,
                   use="val")
        wx.EndBusyCursor()
        grass.use_temp_region()
        grass.run_command("g.region", vector=tmpvector)
        region = grass.region()

        marea = MaskedArea(region, rasterName)

        RunCommand("g.remove", flags="f", type="raster", name=tmpraster)
        RunCommand("g.remove", flags="f", type="vector", name=tmpvector)

        os.unlink(polyfile.name)
        return marea
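The temporary file written at the top of writeArea is plain r.in.poly input. A small sketch that just builds that text, with made-up coordinates, shows what the file contains:

def polygon_file_text(coords, cat_id):
    # mirrors the writes above: AREA keyword, one "east north" pair per line,
    # then a category line
    lines = ["AREA\n"]
    for east, north in coords:
        lines.append(" %s %s\n" % (east, north))
    lines.append("=%d a\n" % cat_id)
    return "".join(lines)

print(polygon_file_text([(635000.0, 221000.0), (636500.0, 222500.0)], cat_id=1))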
Example #12
    def writeArea(self, coords, rasterName):
        polyfile = tempfile.NamedTemporaryFile(delete=False)
        polyfile.write("AREA\n")
        for coor in coords:
            east, north = coor
            point = " %s %s\n" % (east, north)
            polyfile.write(point)

        catbuf = "=%d a\n" % self.catId
        polyfile.write(catbuf)
        self.catId = self.catId + 1

        polyfile.close()
        region_settings = grass.parse_command('g.region',
                                              flags='p',
                                              delimiter=':')
        pname = polyfile.name.split('/')[-1]
        tmpraster = "rast_" + pname
        tmpvector = "vect_" + pname
        wx.BeginBusyCursor()
        wx.Yield()
        RunCommand('r.in.poly',
                   input=polyfile.name,
                   output=tmpraster,
                   rows=region_settings['rows'],
                   overwrite=True)

        RunCommand('r.to.vect',
                   input=tmpraster,
                   output=tmpvector,
                   type='area',
                   overwrite=True)

        RunCommand('v.to.rast',
                   input=tmpvector,
                   output=rasterName,
                   value=1,
                   use='val')
        wx.EndBusyCursor()
        grass.use_temp_region()
        grass.run_command('g.region', vector=tmpvector)
        region = grass.region()

        marea = MaskedArea(region, rasterName)

        RunCommand('g.remove', flags='f', type='raster', name=tmpraster)
        RunCommand('g.remove', flags='f', type='vector', name=tmpvector)

        os.unlink(polyfile.name)
        return marea
Example #13
    def _rectangleDrawn(self):
        """When drawing finished, get region values"""
        mouse = self.mapWindow.mouse
        item = self._registeredGraphics.GetItem(0)
        p1 = self.mapWindow.Pixel2Cell(mouse["begin"])
        p2 = self.mapWindow.Pixel2Cell(mouse["end"])
        item.SetCoords([p1, p2])
        region = {
            "n": max(p1[1], p2[1]),
            "s": min(p1[1], p2[1]),
            "w": min(p1[0], p2[0]),
            "e": max(p1[0], p2[0]),
        }
        item.SetPropertyVal("hide", False)
        self.mapWindow.ClearLines()
        self._registeredGraphics.Draw()
        if self.samplingtype in [SamplingType.MUNITSR, SamplingType.MMVWINR]:
            dlg = wx.MessageDialog(
                self,
                "Is this area ok?",
                "select sampling unit",
                wx.YES_NO | wx.ICON_QUESTION,
            )
            ret = dlg.ShowModal()
            if ret == wx.ID_YES:
                grass.use_temp_region()
                grass.run_command(
                    "g.region",
                    n=region["n"],
                    s=region["s"],
                    e=region["e"],
                    w=region["w"],
                )
                tregion = grass.region()
                self.sampleFrameChanged.emit(region=tregion)
                self.mapWindow.ClearLines()
                item = self._registeredGraphics.GetItem(0)
                item.SetPropertyVal("hide", True)
                layers = self.map_.GetListOfLayers()
                self.mapWindow.ZoomToMap(layers=layers,
                                         ignoreNulls=False,
                                         render=True)
            else:
                self.nextRegion(next=False)
            dlg.Destroy()

        elif self.samplingtype != SamplingType.WHOLE:
            """When drawing finished, get region values"""
            self.sampleFrameChanged.emit(region=region)
Example #14
File: data.py Project: caomw/grass
    def _computeRegions(self, width, height, count, startRegion, endRegion=None, zoomValue=None):
        """Computes regions based on start region and end region or zoom value
        for each of the animation frames."""
        currRegion = dict(gcore.region())  # cast to dict, otherwise deepcopy error
        del currRegion['cells']
        del currRegion['cols']
        del currRegion['rows']
        regions = []
        for i in range(self._mapCount):
            if endRegion or zoomValue:
                regions.append(copy.copy(currRegion))
            else:
                regions.append(None)
        if not startRegion:
            self._regions = regions
            return

        startRegionDict = parse_key_val(gcore.read_command('g.region', flags='gu',
                                                                 region=startRegion),
                                              val_type=float)
        if endRegion:
            endRegionDict = parse_key_val(gcore.read_command('g.region', flags='gu',
                                                                   region=endRegion),
                                                val_type=float)
            for key in ('n', 's', 'e', 'w'):
                values = interpolate(startRegionDict[key], endRegionDict[key], self._mapCount)
                for value, region in zip(values, regions):
                    region[key] = value

        elif zoomValue:
            for i in range(self._mapCount):
                regions[i]['n'] -= zoomValue[0] * i
                regions[i]['e'] -= zoomValue[1] * i
                regions[i]['s'] += zoomValue[0] * i
                regions[i]['w'] += zoomValue[1] * i

                # handle cases when north < south and similarly EW
                if regions[i]['n'] < regions[i]['s'] or \
                   regions[i]['e'] < regions[i]['w']:
                        regions[i] = regions[i - 1]

        for region in regions:
            mapwidth = abs(region['e'] - region['w'])
            mapheight = abs(region['n'] - region['s'])
            region['nsres'] = mapheight / height
            region['ewres'] = mapwidth / width

        self._regions = regions
Example #15
    def writeArea(self, coords, rasterName):
        polyfile = tempfile.NamedTemporaryFile(delete=False)
        polyfile.write("AREA\n")
        for coor in coords:
            east, north = coor
            point = " %s %s\n" % (east, north)
            polyfile.write(point)

        catbuf = "=%d a\n" % self.catId
        polyfile.write(catbuf)
        self.catId = self.catId + 1

        polyfile.close()
        region_settings = grass.parse_command('g.region', flags='p',
                                              delimiter=':')
        pname = polyfile.name.split('/')[-1]
        tmpraster = "rast_" + pname
        tmpvector = "vect_" + pname
        wx.BeginBusyCursor()
        wx.Yield()
        RunCommand('r.in.poly', input=polyfile.name, output=tmpraster,
                   rows=region_settings['rows'], overwrite=True)

        RunCommand('r.to.vect', input=tmpraster, output=tmpvector,
                   type='area', overwrite=True)

        RunCommand('v.to.rast', input=tmpvector, output=rasterName,
                   value=1, use='val')
        wx.EndBusyCursor()
        grass.use_temp_region()
        grass.run_command('g.region', vector=tmpvector)
        region = grass.region()

        marea = MaskedArea(region, rasterName)

        RunCommand('g.remove', flags='f', type='raster', name=tmpraster)
        RunCommand('g.remove', flags='f', type='vector', name=tmpvector)

        os.unlink(polyfile.name)
        return marea
Example #16
    def _rectangleDrawn(self):
        """When drawing finished, get region values"""
        mouse = self.mapWindow.mouse
        item = self._registeredGraphics.GetItem(0)
        p1 = self.mapWindow.Pixel2Cell(mouse['begin'])
        p2 = self.mapWindow.Pixel2Cell(mouse['end'])
        item.SetCoords([p1, p2])
        region = {'n': max(p1[1], p2[1]),
                  's': min(p1[1], p2[1]),
                  'w': min(p1[0], p2[0]),
                  'e': max(p1[0], p2[0])}
        item.SetPropertyVal('hide', False)
        self.mapWindow.ClearLines()
        self._registeredGraphics.Draw(self.mapWindow.pdcTmp)
        if self.samplingtype in [SamplingType.MUNITSR, SamplingType.MMVWINR]:
            dlg = wx.MessageDialog(self, "Is this area ok?",
                                   "select sampling unit",
                                   wx.YES_NO | wx.ICON_QUESTION)
            ret = dlg.ShowModal()
            if ret == wx.ID_YES:
                grass.use_temp_region()
                grass.run_command('g.region', n=region['n'], s=region['s'],
                                  e=region['e'], w=region['w'])
                tregion = grass.region()
                self.sampleFrameChanged.emit(region=tregion)
                self.mapWindow.ClearLines()
                item = self._registeredGraphics.GetItem(0)
                item.SetPropertyVal('hide', True)
                layers = self.map_.GetListOfLayers()
                self.mapWindow.ZoomToMap(layers=layers, ignoreNulls=False,
                                         render=True)
            else:
                self.nextRegion(next=False)
            dlg.Destroy()

        elif self.samplingtype != SamplingType.WHOLE:
            """When drawing finished, get region values"""
            self.sampleFrameChanged.emit(region=region)
Example #17
def main():
    """Main function, called at execution time."""

    # parse arguments
    output = options['output']
    lonmap = options['lon']
    latmap = options['lat']
    topg = grass_str_list(options['topg'])
    thk = grass_str_list(options['thk'])
    usurf = grass_str_list(options['usurf'])
    bheatflx = grass_str_list(options['bheatflx'])
    tillphi = grass_str_list(options['tillphi'])
    air_temp = grass_str_list(options['air_temp'])
    air_temp_sd = grass_str_list(options['air_temp_sd'])
    temp_ma = grass_str_list(options['temp_ma'])
    precipitation = grass_str_list(options['precipitation'])
    edgetemp = options['edgetemp']
    edgetopg = options['edgetopg']
    edgewidth = int(options['edgewidth'])
    iceprecip = flags['p']
    celcius = flags['c']
    edgebuffer = flags['e']
    fahrenheit = flags['f']
    nolonlat = flags['x']

    # multiple input is not implemented for topographic maps
    if len(topg) > 1:
        grass.fatal("Multiple topg export is not implemented yet, sorry.")
    if len(thk) > 1:
        grass.fatal("Multiple thk export is not implemented yet, sorry.")
    if len(usurf) > 1:
        grass.fatal("Multiple usurf export is not implemented yet, sorry.")

    # this is here until order of dimensions becomes an option
    twodims = ('x', 'y')

    # read current region
    region = grass.region()
    cols = int(region['cols'])
    rows = int(region['rows'])

    # read current projection
    proj = pyproj.Proj(grass.read_command('g.proj', flags='jf'))

    # open NetCDF file
    nc = PISMDataset(output, 'w', format='NETCDF3_CLASSIC')

    # set global attributes and projection info
    nc.Conventions = 'CF-1.4'
    nc.history = time.strftime('%Y-%m-%d %H:%M:%S %Z: ') + ' '.join(sys.argv)
    nc.proj4 = proj.srs.rstrip()
    mapping = nc.createVariable('mapping', byte)
    mapping.proj4 = proj.srs.rstrip()

    # define the dimensions
    nc.createDimension('time', None)  # None means unlimited
    nc.createDimension('x', cols)
    nc.createDimension('y', rows)
    nc.createDimension('nv', 2)

    # set projection x coordinate
    xvar = nc.createVariable('x', 'f8', ('x',))
    for i in range(cols):
        xvar[i] = region['w'] + (i+.5)*region['ewres']

    # set projection y coordinate
    yvar = nc.createVariable('y', 'f8', ('y',))
    for i in range(rows):
        yvar[i] = region['s'] + (i+.5)*region['nsres']

    # initialize longitude and latitude
    if (lonmap and latmap) or not nolonlat:
        lonvar = nc.createVariable('lon', 'f4', twodims)
        latvar = nc.createVariable('lat', 'f4', twodims)

    # export lon and lat maps if both available
    if lonmap and latmap:
        grass.message("Exporting longitude...")
        lonvar[:] = read_map(lonmap)
        grass.message("Exporting latitude...")
        latvar[:] = read_map(latmap)

    # else compute them using pyproj
    elif not nolonlat:
        grass.message("Longitude and / or latitude map(s) unspecified."
                      "Calculating values from current projection...")
        x = repeat(xvar, rows)
        y = tile(yvar, cols)
        lonvar[:], latvar[:] = proj(x, y, inverse=True)

    # initialize bedrock surface elevation
    if topg or (thk and usurf):
        topgvar = nc.createVariable('topg', 'f4', twodims)

    # initialize land ice thickness
    if thk or (topg and usurf):
        thkvar = nc.createVariable('thk', 'f4', twodims)

    # initialize ice surface elevation
    if usurf or (topg and thk):
        usurfvar = nc.createVariable('usurf', 'f4', twodims)

    # export available topographic maps
    if topg:
        grass.message("Exporting bed surface elevation...")
        topgvar.set_maps(topg)
    if thk:
        grass.message("Exporting land ice thickness...")
        thkvar.set_maps(thk)
    if usurf:
        grass.message("Exporting ice surface elevation...")
        usurfvar.set_maps(usurf)

    # possibly compute the rest
    if not topg and (thk and usurf):
        grass.message("Computing land ice thickness...")
        topgvar[:] = usurfvar[:] - thkvar[:]
    if not thk and (topg and usurf):
        grass.message("Computing land ice thickness...")
        thkvar[:] = usurfvar[:] - topgvar[:]
    if not usurf and (topg and thk):
        grass.message("Computing ice surface elevation...")
        usurfvar[:] = topgvar[:] + thkvar[:]

    # assign given edge topography at domain edges
    if (topg or (thk and usurf)) and edgebuffer:
        topgvar[:edgewidth, :] = topgvar[-edgewidth:, :] = edgetopg
        topgvar[:, :edgewidth] = topgvar[:, -edgewidth:] = edgetopg

    # set geothermic flux
    if bheatflx:
        bheatflxvar = nc.createVariable('bheatflx', 'f4', twodims)
        grass.message("Exporting geothermic flux...")
        bheatflxvar.set_maps(bheatflx)

    # set till friction angle
    if tillphi:
        tillphivar = nc.createVariable('tillphi', 'f4', twodims)
        grass.message("Exporting till friction angle...")
        tillphivar.set_maps(tillphi)

    # set near-surface air temperature (air_temp)
    if air_temp:
        air_tempvar = nc.createVariable('air_temp', 'f4', get_dim(air_temp))
        if celcius:
            air_tempvar.units = 'degC'
        elif fahrenheit:
            air_tempvar.units = 'degF'
        grass.message("Exporting near-surface air temperature...")
        air_tempvar.set_maps(air_temp)

    # assign given edge temperature at domain edges
    if air_temp and edgebuffer:
        for i in range(air_tempvar.shape[0]):
            air_tempvar[i, :edgewidth, :] = edgetemp
            air_tempvar[i, -edgewidth:, :] = edgetemp
            air_tempvar[i, :, :edgewidth] = edgetemp
            air_tempvar[i, :, -edgewidth:] = edgetemp

    # set standard deviation of near-surface air temperature (air_temp_sd)
    if air_temp_sd:
        air_temp_sdvar = nc.createVariable(
            'air_temp_sd', 'f4', get_dim(air_temp_sd))
        grass.message("Exporting standard deviation of near-surface air "
                      "temperature...")
        air_temp_sdvar.set_maps(air_temp_sd)

    # set mean annual air temperature (temp_ma)
    if temp_ma:
        temp_mavar = nc.createVariable('temp_ma', 'f4', get_dim(temp_ma))
        if celcius:
            temp_mavar.units = 'degC'
        elif fahrenheit:
            temp_mavar.units = 'degF'
        grass.message("Exporting mean annual air temperature...")
        temp_mavar.set_maps(temp_ma)

    # set annual snow precipitation
    if precipitation:
        precipitationvar = nc.createVariable(
            'precipitation', 'f4', get_dim(precipitation))
        precipitationvar.long_name = (
            'mean annual %sprecipitation rate'
            % ('ice-equivalent ' if iceprecip else ''))
        grass.message("Exporting precipitation rate...")
        precipitationvar.set_maps(
            precipitation,
            scalefactor=(1/0.91 if iceprecip else 1.0))

    # set time coordinate and time bounds
    timevar = nc.createVariable('time', 'f8', ('time',))
    time_boundsvar = nc.createVariable('time_bounds', 'f8', ('time', 'nv'))
    for i in range(len(nc.dimensions['time'])):
        timevar[i] = i
        time_boundsvar[i, :] = [i, i+1]

    # close NetCDF file
    nc.close()
    grass.message("NetCDF file " + output + " created")
Example #18
def main():
    size = int(options['size'])
    gamma = scale = None
    if options['gamma']:
        gamma = float(options['gamma'])
    if options['scaling_factor']:
        scale = float(options['scaling_factor'])
    input_dev = options['input']
    output = options['output']
    method = options['method']

    if method in ('gravity', 'kernel') and (gamma is None or scale is None):
        gcore.fatal(
            _("Methods gravity and kernel require options scaling_factor and gamma"
              ))

    temp_map = 'tmp_futures_devPressure_' + str(os.getpid()) + '_copy'
    temp_map_out = 'tmp_futures_devPressure_' + str(os.getpid()) + '_out'
    temp_map_nulls = 'tmp_futures_devPressure_' + str(os.getpid()) + '_nulls'
    global TMP, TMPFILE
    if flags['n']:
        gcore.message(_("Preparing data..."))
        region = gcore.region()
        gcore.use_temp_region()
        gcore.run_command('g.region',
                          n=region['n'] + size * region['nsres'],
                          s=region['s'] - size * region['nsres'],
                          e=region['e'] + size * region['ewres'],
                          w=region['w'] - size * region['ewres'])
        TMP.append(temp_map)
        TMP.append(temp_map_nulls)
        TMP.append(temp_map_out)
        exp = "{temp_map_nulls} = if(isnull({inp}), 1, null())".format(
            temp_map_nulls=temp_map_nulls, inp=input_dev)
        grast.mapcalc(exp=exp)
        grast.mapcalc(exp="{temp} = if(isnull({inp}), 0, {inp})".format(
            temp=temp_map, inp=input_dev))
        rmfilter_inp = temp_map
        rmfilter_out = temp_map_out
    else:
        rmfilter_inp = input_dev
        rmfilter_out = output

    matrix = distance_matrix(size)
    if method == 'occurrence':
        matrix[matrix > 0] = 1
    elif method == 'gravity':
        with np.errstate(divide='ignore'):
            denom = np.power(matrix, gamma)
            matrix = scale / denom
            matrix[denom == 0] = 0
    else:
        matrix_ = scale * np.exp(-2 * matrix / gamma)
        matrix = np.where(matrix > 0, matrix_, 0)

    path = gcore.tempfile()
    global TMPFILE
    TMPFILE = path

    with open(path, 'w') as f:
        f.write(write_filter(matrix))
    gcore.message(_("Running development pressure filter..."))
    gcore.run_command('r.mfilter',
                      input=rmfilter_inp,
                      output=rmfilter_out,
                      filter=path)

    if flags['n']:
        gcore.run_command(
            'g.region',
            n=region['n'],
            s=region['s'],
            e=region['e'],
            w=region['w'],
        )
        grast.mapcalc(
            exp="{out} = if(isnull({temp_null}), {rmfilter_out}, null())".
            format(temp_null=temp_map_nulls,
                   rmfilter_out=rmfilter_out,
                   out=output))
        gcore.del_temp_region()

    grast.raster_history(output)
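distance_matrix() and write_filter() are defined elsewhere in this module and are not shown here. A plausible sketch, assuming the matrix holds the Euclidean distance (in cells) of every cell of a (2*size+1) square window from its centre, and that the filter is written using r.mfilter's TITLE/MATRIX/DIVISOR/TYPE keywords:

import numpy as np

def distance_matrix(size):
    # assumption: Euclidean cell distance from the window centre
    y, x = np.mgrid[-size:size + 1, -size:size + 1]
    return np.sqrt(x * x + y * y)

def write_filter(matrix):
    # assumption: serialize the window as an r.mfilter filter file
    lines = ["TITLE development pressure", "MATRIX %d" % matrix.shape[0]]
    for row in matrix:
        lines.append(" ".join("%g" % v for v in row))
    lines.extend(["DIVISOR 1", "TYPE P"])
    return "\n".join(lines) + "\n"

print(write_filter(distance_matrix(1)))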
Example #19
def main():
    # Take into account those extra pixels we'll be a addin'
    max_cols = int(options['maxcols']) - int(options['overlap'])
    max_rows = int(options['maxrows']) - int(options['overlap'])

    if max_cols == 0:
        gcore.fatal(_("It is not possibile to set 'maxcols=%s' and "
                      "'overlap=%s'. Please set maxcols>overlap" %
                      (options['maxcols'], options['overlap'])))
    elif max_rows == 0:
        gcore.fatal(_("It is not possibile to set 'maxrows=%s' and "
                      "'overlap=%s'. Please set maxrows>overlap" %
                      (options['maxrows'], options['overlap'])))
    # destination projection
    if not options['destproj']:
        dest_proj = gcore.read_command('g.proj',
                                       quiet=True,
                                       flags='jf').rstrip('\n')
        if not dest_proj:
            gcore.fatal(_('g.proj failed'))
    else:
        dest_proj = options['destproj']
    gcore.debug("Getting destination projection -> '%s'" % dest_proj)

    # projection scale
    if not options['destscale']:
        ret = gcore.parse_command('g.proj',
                                  quiet=True,
                                  flags='j')
        if not ret:
            gcore.fatal(_('g.proj failed'))

        if '+to_meter' in ret:
            dest_scale = ret['+to_meter'].strip()
        else:
            gcore.warning(
                _("Scale (%s) not found, assuming '1'") %
                '+to_meter')
            dest_scale = '1'
    else:
        dest_scale = options['destscale']
    gcore.debug('Getting destination projection scale -> %s' % dest_scale)

    # set up the projections
    srs_source = {'proj': options['sourceproj'],
                  'scale': float(options['sourcescale'])}
    srs_dest = {'proj': dest_proj, 'scale': float(dest_scale)}

    if options['region']:
        gcore.run_command('g.region',
                          quiet=True,
                          region=options['region'])
    dest_bbox = gcore.region()
    gcore.debug('Getting destination region')

    # output field separator
    fs = separator(options['separator'])

    # project the destination region into the source:
    gcore.verbose('Projecting destination region into source...')
    dest_bbox_points = bboxToPoints(dest_bbox)

    dest_bbox_source_points, errors_dest = projectPoints(dest_bbox_points,
                                                         source=srs_dest,
                                                         dest=srs_source)

    if len(dest_bbox_source_points) == 0:
        gcore.fatal(_("There are no tiles available. Probably the output "
                      "projection system it is not compatible with the "
                      "projection of the current location"))

    source_bbox = pointsToBbox(dest_bbox_source_points)

    gcore.verbose('Projecting source bounding box into destination...')

    source_bbox_points = bboxToPoints(source_bbox)

    source_bbox_dest_points, errors_source = projectPoints(source_bbox_points,
                                                           source=srs_source,
                                                           dest=srs_dest)

    x_metric = 1 / dest_bbox['ewres']
    y_metric = 1 / dest_bbox['nsres']

    gcore.verbose('Computing length of sides of source bounding box...')

    source_bbox_dest_lengths = sideLengths(source_bbox_dest_points,
                                           x_metric, y_metric)

    # Find the skewness of the two directions.
    # Define it to be greater than one
    # In the direction (x or y) in which the world is least skewed (i.e. north-south in lat-long)
    # Divide the world into strips. These strips are as big as possible, constrained by max_
    # In the other direction do the same thing.
    # There's some recomputation of the size of the world that's got to come in
    # here somewhere.

    # For now, however, we are going to go ahead and request more data than is necessary.
    # For small regions far from the critical areas of projections this makes very little difference
    # in the amount of data fetched.
    # We can make this efficient for big regions or regions near critical
    # points later.

    bigger = []
    bigger.append(max(source_bbox_dest_lengths['x']))
    bigger.append(max(source_bbox_dest_lengths['y']))
    maxdim = (max_cols, max_rows)

    # Compute the number and size of tiles to use in each direction
    # I'm making fairly even sized tiles
    # They differ from each other in height and width only by one cell
    # I'm going to make the numbers all simpler and add this extra cell to
    # every tile.

    gcore.message(_('Computing tiling...'))
    tiles = [-1, -1]
    tile_base_size = [-1, -1]
    tiles_extra_1 = [-1, -1]
    tile_size = [-1, -1]
    tileset_size = [-1, -1]
    tile_size_overlap = [-1, -1]
    for i in range(len(bigger)):
        # make these into integers.
        # round up
        bigger[i] = int(bigger[i] + 1)
        tiles[i] = int((bigger[i] / maxdim[i]) + 1)
        tile_size[i] = tile_base_size[i] = int(bigger[i] / tiles[i])
        tiles_extra_1[i] = int(bigger[i] % tiles[i])
        # This is adding the extra pixel (remainder) to all of the tiles:
        if tiles_extra_1[i] > 0:
            tile_size[i] = tile_base_size[i] + 1
        tileset_size[i] = int(tile_size[i] * tiles[i])
        # Add overlap to tiles (doesn't affect tileset_size)
        tile_size_overlap[i] = tile_size[i] + int(options['overlap'])

    gcore.verbose("There will be %d by %d tiles each %d by %d cells" %
                  (tiles[0], tiles[1], tile_size[0], tile_size[1]))

    ximax = tiles[0]
    yimax = tiles[1]

    min_x = source_bbox['w']
    min_y = source_bbox['s']
    max_x = source_bbox['e']
    max_y = source_bbox['n']
    span_x = (max_x - min_x)
    span_y = (max_y - min_y)

    xi = 0
    tile_bbox = {'w': -1, 's': -1, 'e': -1, 'n': -1}

    if errors_dest > 0:
        gcore.warning(_("During computation %i tiles could not be created" %
                        errors_dest))

    while xi < ximax:
        tile_bbox['w'] = float(
            min_x) + (float(xi) * float(tile_size[0]) / float(tileset_size[0])) * float(span_x)
        tile_bbox['e'] = float(min_x) + (float(xi + 1) * float(tile_size_overlap[0]
                                                               ) / float(tileset_size[0])) * float(span_x)
        yi = 0
        while yi < yimax:
            tile_bbox['s'] = float(
                min_y) + (float(yi) * float(tile_size[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox['n'] = float(min_y) + (
                float(yi + 1) * float(tile_size_overlap[1]) /
                float(tileset_size[1])) * float(span_y)
            tile_bbox_points = bboxToPoints(tile_bbox)
            tile_dest_bbox_points, errors = projectPoints(tile_bbox_points,
                                                          source=srs_source,
                                                          dest=srs_dest)
            tile_dest_bbox = pointsToBbox(tile_dest_bbox_points)
            if bboxesIntersect(tile_dest_bbox, dest_bbox):
                if flags['w']:
                    print("bbox=%s,%s,%s,%s&width=%s&height=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                elif flags['g']:
                    print("w=%s;s=%s;e=%s;n=%s;cols=%s;rows=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                else:
                    print("%s%s%s%s%s%s%s%s%s%s%s" %
                          (tile_bbox['w'], fs, tile_bbox['s'], fs,
                           tile_bbox['e'], fs, tile_bbox['n'], fs,
                           tile_size_overlap[0], fs, tile_size_overlap[1]))
            yi += 1
        xi += 1
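The tile-size bookkeeping inside the loop above condenses to a few lines. A standalone sketch (the function name is illustrative; max_cells is the per-tile maximum already reduced by the overlap, as in max_cols/max_rows above):

def tile_layout(extent_cells, max_cells, overlap):
    extent_cells = int(extent_cells + 1)        # round up, as above
    tiles = int(extent_cells / max_cells) + 1
    tile_size = int(extent_cells / tiles)
    if extent_cells % tiles > 0:                # spread the remainder cell to every tile
        tile_size += 1
    tileset_size = tile_size * tiles
    return tiles, tile_size + overlap, tileset_size

print(tile_layout(2500.5, max_cells=1000, overlap=2))  # (3, 836, 2502)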
Example #20
def main(input_, coordinates, output, axes, slice_line, units, offset):
    prefix = 'r3_to_rast_tmp_' + str(os.getpid())
    gcore.run_command('r3.to.rast', input=input_, output=prefix)
    maps = gcore.read_command('g.list', type='raster', pattern=prefix + '*').strip().split(os.linesep)
    region = gcore.region(region3d=True)
    res = (region['ewres'] + region['nsres']) / 2.
    if coordinates[0][0] > coordinates[1][0]:
        coordinates = (coordinates[1], coordinates[0])
    profile = gcore.read_command('r.profile', coordinates=coordinates,
                                 input=maps[0], output='-').strip().split(os.linesep)
    cols = len(profile)
    rows = len(maps)
    s = w = 0
    e = cols * res
    n = rows * region['tbres']
    if offset:
        offset[0] = w + (e - w) * float(offset[0])
        offset[1] = (n - s) * float(offset[1])
        e += offset[0]
        w += offset[0]
        n += offset[1]
        s += offset[1]
    ascii_input = [("north: {n}\nsouth: {s}\neast: {e}\nwest: {w}\n"
                   "rows: {r}\ncols: {c}\n".format(n=n, s=s, e=e, w=w, r=rows, c=cols))]
    for map_ in reversed(maps):
        profile = gcore.read_command('r.profile', coordinates=coordinates,
                                     input=map_, output='-', quiet=True).strip().split(os.linesep)
        ascii_input.append(' '.join([line.split()[1] for line in profile]))

    gcore.write_command('r.in.ascii', input='-', stdin='\n'.join(ascii_input),
                        output=output, type='FCELL')

    gcore.run_command('r.colors', map=output, raster_3d=input_)

    if slice_line:
        vector_ascii = []
        vector_ascii.append('L 2 1')
        vector_ascii.append('{x} {y}'.format(x=coordinates[0][0], y=coordinates[0][1]))
        vector_ascii.append('{x} {y}'.format(x=coordinates[1][0], y=coordinates[1][1]))
        vector_ascii.append('1 1')
        gcore.write_command('v.in.ascii', format='standard', input='-', stdin='\n'.join(vector_ascii),
                            flags='n', output=slice_line)

    if axes:
        vector_ascii = []
        vector_ascii.append('L 2 1')
        vector_ascii.append('{x} {y}'.format(x=w, y=n + 0.1 * (n - s)))
        vector_ascii.append('{x} {y}'.format(x=e, y=n + 0.1 * (n - s)))
        vector_ascii.append('1 1')
        vector_ascii.append('P 1 1')
        vector_ascii.append('{x} {y}'.format(x=w, y=n + 0.1 * (n - s)))
        vector_ascii.append('2 1')
        vector_ascii.append('P 1 1')
        vector_ascii.append('{x} {y}'.format(x=e, y=n + 0.1 * (n - s)))
        vector_ascii.append('2 2')

        vector_ascii.append('L 2 1')
        vector_ascii.append('{x} {y}'.format(x=e + 0.05 * (e - w), y=n))
        vector_ascii.append('{x} {y}'.format(x=e + 0.05 * (e - w), y=s))
        vector_ascii.append('2 3')
        vector_ascii.append('P 1 1')
        vector_ascii.append('{x} {y}'.format(x=e + 0.05 * (e - w), y=n))
        vector_ascii.append('1 2')
        vector_ascii.append('P 1 1')
        vector_ascii.append('{x} {y}'.format(x=e + 0.05 * (e - w), y=s))
        vector_ascii.append('1 3')

        gcore.write_command('v.in.ascii', format='standard', input='-', stdin='\n'.join(vector_ascii),
                            flags='n', output=axes)
        if units:
            units = units.split(',')
        else:
            units = ['', '']
        gcore.run_command('v.db.addtable', map=axes, layer=1, columns="label varchar(50)")
        sql = ('UPDATE {axes} SET label = "{length} {u1}" WHERE cat = 1;\n'
               'UPDATE {axes} SET label = "{top} {u2}" WHERE cat = 2;\n'
               'UPDATE {axes} SET label = "{bottom} {u2}" WHERE cat = 3;\n'.format(axes=axes, length=int(e - w),
                                                                            top=region['t'], bottom=region['b'],
                                                                            u1=units[0], u2=units[1]))
        gcore.write_command('db.execute', input='-', stdin=sql)
Example #21
def main():
    size = int(options["size"])
    gamma = scale = None
    if options["gamma"]:
        gamma = float(options["gamma"])
    if options["scaling_factor"]:
        scale = float(options["scaling_factor"])
    input_dev = options["input"]
    output = options["output"]
    method = options["method"]

    if method in ("gravity", "kernel") and (gamma is None or scale is None):
        gcore.fatal(
            _("Methods gravity and kernel require options scaling_factor and gamma"
              ))

    temp_map = "tmp_futures_devPressure_" + str(os.getpid()) + "_copy"
    temp_map_out = "tmp_futures_devPressure_" + str(os.getpid()) + "_out"
    temp_map_nulls = "tmp_futures_devPressure_" + str(os.getpid()) + "_nulls"
    global TMP, TMPFILE
    if flags["n"]:
        gcore.message(_("Preparing data..."))
        region = gcore.region()
        gcore.use_temp_region()
        gcore.run_command(
            "g.region",
            n=region["n"] + size * region["nsres"],
            s=region["s"] - size * region["nsres"],
            e=region["e"] + size * region["ewres"],
            w=region["w"] - size * region["ewres"],
        )
        TMP.append(temp_map)
        TMP.append(temp_map_nulls)
        TMP.append(temp_map_out)
        exp = "{temp_map_nulls} = if(isnull({inp}), 1, null())".format(
            temp_map_nulls=temp_map_nulls, inp=input_dev)
        grast.mapcalc(exp=exp)
        grast.mapcalc(exp="{temp} = if(isnull({inp}), 0, {inp})".format(
            temp=temp_map, inp=input_dev))
        rmfilter_inp = temp_map
        rmfilter_out = temp_map_out
    else:
        rmfilter_inp = input_dev
        rmfilter_out = output

    matrix = distance_matrix(size)
    if method == "occurrence":
        matrix[matrix > 0] = 1
    elif method == "gravity":
        with np.errstate(divide="ignore"):
            denom = np.power(matrix, gamma)
            matrix = scale / denom
            matrix[denom == 0] = 0
    else:
        matrix_ = scale * np.exp(-2 * matrix / gamma)
        matrix = np.where(matrix > 0, matrix_, 0)

    path = gcore.tempfile()
    global TMPFILE
    TMPFILE = path

    with open(path, "w") as f:
        f.write(write_filter(matrix))
    gcore.message(_("Running development pressure filter..."))
    gcore.run_command("r.mfilter",
                      input=rmfilter_inp,
                      output=rmfilter_out,
                      filter=path)

    if flags["n"]:
        gcore.run_command(
            "g.region",
            n=region["n"],
            s=region["s"],
            e=region["e"],
            w=region["w"],
        )
        grast.mapcalc(
            exp="{out} = if(isnull({temp_null}), {rmfilter_out}, null())".
            format(temp_null=temp_map_nulls,
                   rmfilter_out=rmfilter_out,
                   out=output))
        gcore.del_temp_region()

    grast.raster_history(output)
Example #22
def rasterize_vectors_and_load_to_db(
    grassdb,
    grass_location,
    qgis_prefix_path,
    mask,
    vector_path,
    attribue_name,
    raster_name,
):

    QgsApplication.setPrefixPath(qgis_prefix_path, True)
    Qgs = QgsApplication([], False)
    Qgs.initQgis()
    from processing.core.Processing import Processing
    from processing.tools import dataobjects
    from qgis import processing

    feedback = QgsProcessingFeedback()
    Processing.initialize()
    QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms())
    context = dataobjects.createContext()
    context.setInvalidGeometryCheck(QgsFeatureRequest.GeometryNoCheck)

    mask_layer = qgis_raster_read_raster(
        processing, os.path.join(grassdb, mask + ".tif")
    )  ### load DEM raster as a  QGIS raster object to obtain attribute
    cellSize, SpRef_in = qgis_raster_return_raster_properties(
        processing, mask_layer)  ### Get Raster cell size

    # load grass working location
    import grass.script as grass
    import grass.script.setup as gsetup
    from grass.pygrass.modules import Module
    from grass.pygrass.modules.shortcuts import general as g
    from grass.pygrass.modules.shortcuts import raster as r
    from grass.script import array as garray
    from grass.script import core as gcore
    from grass_session import Session

    os.environ.update(
        dict(GRASS_COMPRESS_NULLS="1",
             GRASS_COMPRESSOR="ZSTD",
             GRASS_VERBOSE="1"))
    PERMANENT = Session()
    PERMANENT.open(gisdb=grassdb, location=grass_location, create_opts="")

    # get dem array and get nrows and ncols of the domain
    strtemp_array = Return_Raster_As_Array_With_garray(garray, mask)
    ncols = int(strtemp_array.shape[1])
    nrows = int(strtemp_array.shape[0])
    grsregion = gcore.region()

    qgis_raster_gdal_rasterize(
        processing,
        context,
        INPUT=vector_path,
        Column_nm=attribue_name,
        cellsize=cellSize,
        w=grsregion["w"],
        s=grsregion["s"],
        e=grsregion["e"],
        n=grsregion["n"],
        OUTPUT=os.path.join(grassdb, raster_name + ".tif"),
    )

    grass_raster_r_in_gdal(
        grass,
        raster_path=os.path.join(grassdb, raster_name + ".tif"),
        output_nm=raster_name,
    )

    grass_raster_setnull(grass,
                         raster_nm=raster_name,
                         null_values=[-9999],
                         create_new_raster=False)

    grass_raster_v_import(grass,
                          input_path=vector_path,
                          output_vector_nm=raster_name)
    Qgs.exit()
    PERMANENT.close()
Example #23
wb = []
for wbpath in wbfiles:
  wbfile = os.path.basename(wbpath)
  wb.append(wbfile[:-4])

for i in range(len(wbfiles)):
  grass.run_command('r.in.ascii', input=wbfiles[i], output=wb[i], overwrite=True)


# INTERPOLATE AT LOW RES TO GET ALL NEARSHORE NULL VALUES
age = []
for w in wb:
  age.append(w[3:])
  
for i in range(len(wb)):
  grass.run_command('g.rename', rast=wb[i] + ',wb_orig_' + age[i], overwrite=True)

reg = grass.region() # Get original region parameters
grass.run_command('g.region', flags='p', res=3.75)
for i in range(len(wb)):
#  os.system("r.fillnulls input=wb_orig_" + age[i] + " output=" + wb[i] + " --o")
  grass.run_command('r.fillnulls', input='wb_orig_' + age[i], output=wb[i], overwrite=True)



# NOW, RESAMPLE TO HIGH RES
# Actually, no resampling needed :) Just doing it nearest-neighbor style
# Actually, it is needed: areas of cells are important and need to be remembered 

for i in range(len(wb)):
  grass.run_command('g.rename', rast=wb[i] + ',wb_coarse_' + age[i], overwrite=True)

# This is the non-interpolating way
Example #24
def compute(pnt, dem, obs_heigh, maxdist, hcurv, downward, oradius, i, nprocs, obsabselev, memory):
    try:
        #the following lines help to set a delay between one process and the others
        starting.acquire() # no other process can get it until it is released
        threading.Timer(0.1, starting.release).start() # release in 0.1 seconds
        #using temporary regions (on the same mapset) for the different parallel computations
        gscript.use_temp_region()
        #extracting a point from the map of the locations
        Module("v.extract", input=pnt, output="zzpnt"+i, cats=i, flags="t", overwrite=True, quiet=True)
        #getting coordinates of the point location
        coords=Module("v.to.db", flags="p", map="zzpnt"+i, type="point", option="coor", separator="|", stdout_=PIPE)
        coords=coords.outputs.stdout.splitlines()[1:][0]
        x=float(coords.split("|")[1])
        y=float(coords.split("|")[2])
        z=float(coords.split("|")[3])
        coords=str(x)+","+str(y)
        #get elevation of the terrain at the point location
        querydem=Module("r.what", coordinates=coords.split(), map=dem, stdout_=PIPE)
        obselev=float(querydem.outputs.stdout.split("|")[3])
        #setting the working region around the point location
        Module("g.region", vector="zzpnt"+i)
        region = grasscore.region()
        E = region['e']
        W = region['w']
        N = region['n']
        S = region['s']
        #Module("g.region", flags="a", e=E+maxdist, w=W-maxdist, s=S-maxdist, n=N+maxdist) 
        Module("g.region", align=dem, e=E+maxdist, w=W-maxdist, s=S-maxdist, n=N+maxdist)
        #now we check whether the radius of the object for which we calculate the solid angle in each pixel is left at half the resolution or is set by the user
        if oradius == 0: 
            circle_radius=region['nsres']/2
        else:
            circle_radius=oradius
        #Executing viewshed analysis
        if obsabselev:
            relative_height = z - obselev
            #message1 = "* Considered elevation of dem/dsm is: %s *"
            #message2 = "* Relative height of observer above the dem/dsm is: %s *"
            #message3 = "* Absolute elevation of observer used in r.viewshed is: %s *"
            #gscript.message(message1 % (str(obselev)) )
            #gscript.message(message2 % (str(relative_height)))
            #gscript.message(message3 % (str(z)))
            if hcurv:
                Module("r.viewshed", input=dem, output="zzview"+i, coordinates=coords.split(), memory=memory, observer_elevation=relative_height, max_distance=maxdist,  flags="c", overwrite=True, quiet=True)
            else:
                Module("r.viewshed", input=dem, output="zzview"+i, coordinates=coords.split(), memory=memory, observer_elevation=relative_height, max_distance=maxdist, overwrite=True, quiet=True)                
            if downward:
                #Since neither UAVs nor satellites are expected to see above their own level (they only look towards the ground), vertical angles above 90 are set to null.
                Module("r.mapcalc", expression="zzview{I} = if(zzview{I}>90 && zzview{I}<180,null(),zzview{I})".format(I=i), overwrite=True, quiet=True)
        else:
            #message1 = "* Considered elevation of dem/dsm is: %s *"
            #message2 = "* Relative height of observer above the dem/dsm is: %s *"
            #message3 = "* Absolute elevation of observer used in r.viewshed is: %s *"
            #gscript.message(message1 % (str(obselev)) )
            #gscript.message(message2 % (str(obs_heigh)))
            #gscript.message(message3 % (str(obselev + obs_heigh)))            
            if hcurv:
                Module("r.viewshed", input=dem, output="zzview"+i, coordinates=coords.split(), memory=memory, observer_elevation=obs_heigh, max_distance=maxdist, flags="c", overwrite=True, quiet=True)
            else:
                Module("r.viewshed", input=dem, output="zzview"+i, coordinates=coords.split(), memory=memory, observer_elevation=obs_heigh, max_distance=maxdist, overwrite=True, quiet=True)
        #Since r.viewshed sets the cell of the output visibility layer to 180 under the point, this cell is reset to 0 here
        Module("r.mapcalc",expression="zzview{I} = if(zzview{I}==180,0,zzview{I})".format(I=i), overwrite=True, quiet=True)
        #estimating the layer of the horizontal angle between the point and each visible cell (angle of the horizontal line of sight)
        Module("r.mapcalc", expression="{A} = \
            if( y()>{py} && x()>{px}, atan(({px}-x())/({py}-y())),  \
            if( y()<{py} && x()>{px}, 180+atan(({px}-x())/({py}-y())),  \
            if( y()<{py} && x()<{px}, 180+atan(({px}-x())/({py}-y())),  \
            if( y()>{py} && x()<{px}, 360+atan(({px}-x())/({py}-y())), \
            if( y()=={py} && x()>{px}, 90, \
            if( y()<{py} && x()=={px}, 180, \
            if( y()=={py} && x()<{px}, 270, \
            if( y()>{py} && x()=={px}, 0 \
            ) ) ) ) ) ) ) )".format(A='zzview_angle'+i,py=y, px=x), overwrite=True, quiet=True)
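        #(the expression above yields the azimuth of each visible cell as seen from the point, measured clockwise from grid north in the 0-360 degree range)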
        #estimate the layer of the vertical angle between the point and each visible cell (angle of the vertical line of sight)
        Module("r.mapcalc", expression="zzview90_{I} = zzview{I} - 90".format(I=i), overwrite=True, quiet=True)
        #evaluate the vertical component of the versor oriented along the line of sight         
        Module("r.mapcalc", expression="zzc_view{I} = sin(zzview90_{I})".format(I=i), overwrite=True, quiet=True)
        #evaluate the northern component of the versor oriented along the line of sight  
        Module("r.mapcalc", expression="zzb_view{I} = cos(zzview90_{I})*cos(zzview_angle{I})".format(I=i), overwrite=True, quiet=True)
        #evaluate the eastern component of the versor oriented along the line of sight  
        Module("r.mapcalc", expression="zza_view{I} = cos(zzview90_{I})*sin(zzview_angle{I})".format(I=i), overwrite=True, quiet=True)    
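        #(taken together, zza/zzb/zzc are the east, north and up components of the unit line-of-sight vector: the usual spherical-to-Cartesian conversion with zzview90 as the vertical angle and zzview_angle as the azimuth)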
        #estimate the three-dimensional distance between the point and each visible cell
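        #(i.e. the Euclidean distance sqrt(dx^2 + dy^2 + dz^2), with the vertical difference taken between the DEM/DSM surface and the observer elevation)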
        if obsabselev:
            Module("r.mapcalc", expression="{D} = pow(pow(abs(y()-{py}),2)+pow(abs(x()-{px}),2)+pow(abs({dtm}-{Z}),2),0.5)".format(D='zzdistance'+i, dtm=dem, Z=z, py=y, px=x), overwrite=True, quiet=True)
        else:
            Module("r.mapcalc", expression="{D} = pow(pow(abs(y()-{py}),2)+pow(abs(x()-{px}),2)+pow(abs({dtm}-({obs}+{obs_h})),2),0.5)".format(D='zzdistance'+i, dtm=dem, obs=obselev, obs_h=obs_heigh, py=y, px=x), overwrite=True, quiet=True)
        
        #estimating the layer of the angle between the versor of the terrain and the line of sight
        Module("r.mapcalc", expression="zzangle{I} = acos((zza_view{I}*zza_dem+zzb_view{I}*zzb_dem+zzc_view{I}*zzc_dem)/(sqrt(zza_view{I}*zza_view{I}+zzb_view{I}*zzb_view{I}+zzc_view{I}*zzc_view{I})*sqrt(zza_dem*zza_dem+zzb_dem*zzb_dem+zzc_dem*zzc_dem)))".format(I=i), overwrite=True, quiet=True)
        #in rare cases the angle may, erroneously, come out less than 90; set such values to 90
        Module("r.mapcalc", expression="zzangle{I} = if(zzangle{I} > 90, zzangle{I}, 90)".format(I=i), overwrite=True, quiet=True) 
        #filtering 3d distance based on angle{I} map
        Module("r.mapcalc", expression="{D} = if(isnull(zzangle{I}),null(),{D})".format(D="zzdistance"+str(i),I=i), overwrite=True, quiet=True)
        #calculate H1 and H2, the distances from the observer to the farther and nearer points of the inclined circle representing the pixel
        Module("r.mapcalc", expression="zzH1_{I} = pow(pow({r},2)+pow({d},2)-(2*{r}*{d}*cos(270-zzangle{I})),0.5)".format(r=circle_radius,d="zzdistance"+str(i),I=i), overwrite=True, quiet=True) 
        Module("r.mapcalc", expression="zzH2_{I} = pow(pow({r},2)+pow({d},2)-(2*{r}*{d}*cos(zzangle{I}-90)),0.5)".format(r=circle_radius,d="zzdistance"+str(i),I=i), overwrite=True, quiet=True) 
        #calculate B1 and B2, the angles between the line from the observer through the center of the pixel and the lines to the farther and nearer points of the inclined circle representing the pixel
        Module("r.mapcalc", expression="zzB1_{I} = acos( (pow({r},2)-pow(zzH1_{I},2)-pow({d},2)) / (-2*zzH1_{I}*{d}) ) ".format(r=circle_radius,d="zzdistance"+str(i),I=i), overwrite=True, quiet=True) 
        Module("r.mapcalc", expression="zzB2_{I} = acos( (pow({r},2)-pow(zzH2_{I},2)-pow({d},2)) / (-2*zzH2_{I}*{d}) ) ".format(r=circle_radius,d="zzdistance"+str(i),I=i), overwrite=True, quiet=True) 
        #calculate the solid angle, treating the asymmetric ellipse as an ellipse whose minor axis equals the sum of the two unequal semi-minor axes (a standalone numeric sketch of this arithmetic follows this example)
        Module("r.mapcalc", expression="zzsangle{I} = ({pi}*{r}*( {d}*tan(zzB1_{I}) + {d}*tan(zzB2_{I}) )/2 )  / (pow({r},2)+pow({d},2)) ".format(r=circle_radius,d="zzdistance"+str(i),I=i,pi=pi), overwrite=True, quiet=True) 
        #the solid-angle approximation can produce excessively large values under or very close to the observer position; in such cases assume the solid angle is half of the visible sphere (2*pi)
        #the same occurs when an object radius larger than the pixel size is used, which in some cases yields negative values of zzB2 and, in turn, negative solid angles
        Module("r.mapcalc", expression="zzsangle{I} = if(zzsangle{I}>2*{pi} || zzB2_{I}>=90,2*{pi},zzsangle{I})".format(I=i, pi=pi), overwrite=True, quiet=True)
        #removing temporary region    
        gscript.del_temp_region()
    except Exception:
        #cleaning temporary layers
        #cleanup()
        #message = " ******** Something went wrong: please try to reduce the number of CPU (parameter 'procs') ******* "
        #gscript.message(message)
        #sys.exit()
        f = open("error_cat_"+i+".txt", "x")
        f.write("error in category: "+i)
        f.close()
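A minimal standalone sketch of the per-cell solid-angle arithmetic used above, with hypothetical numbers and plain Python in place of r.mapcalc (whose trigonometric functions work in degrees, hence the explicit conversions): H1/H2 and B1/B2 follow from the law of cosines, and the inclined circular object is approximated by an ellipse whose minor axis is the sum of the two unequal semi-minor axes.

import math

def cell_solid_angle(circle_radius, distance, angle_deg):
    # approximate solid angle (steradians) subtended by a circular object of
    # radius circle_radius seen from 3D distance `distance`; angle_deg is the
    # angle (>= 90 by construction) between the terrain versor and the line
    # of sight, as in the zzangle map above
    r, d = circle_radius, distance
    # law of cosines: distances to the far and near edges of the inclined circle
    h1 = math.sqrt(r**2 + d**2 - 2*r*d*math.cos(math.radians(270 - angle_deg)))
    h2 = math.sqrt(r**2 + d**2 - 2*r*d*math.cos(math.radians(angle_deg - 90)))
    # law of cosines again: angular offsets of the two edges seen by the observer
    b1 = math.acos((r**2 - h1**2 - d**2) / (-2*h1*d))
    b2 = math.acos((r**2 - h2**2 - d**2) / (-2*h2*d))
    # asymmetric-ellipse approximation, normalised as in the zzsangle expression
    return math.pi * r * (d*math.tan(b1) + d*math.tan(b2)) / 2 / (r**2 + d**2)

# a 5 m object radius seen face-on (angle 180) from 100 m away is close to the
# solid angle of a flat disc, pi*r^2 / (r^2 + d^2) ~= 0.0078 sr
print(cell_solid_angle(5, 100, 180))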
Example #25
0
def main():
    infile = options['input']
    output = options['output']
    method = options['method']
    dtype = options['type']
    fs = options['separator']
    x = options['x']
    y = options['y']
    z = options['z']
    value_column = options['value_column']
    vrange = options['vrange']
    vscale = options['vscale']
    percent = options['percent']
    pth = options['pth']
    trim = options['trim']
    workers = int(options['workers'])
    scan_only = flags['s']
    shell_style = flags['g']
    ignore_broken = flags['i']

    if workers == 1 and "WORKERS" in os.environ:
        workers = int(os.environ["WORKERS"])

    if not os.path.exists(infile):
        grass.fatal(_("Unable to read input file <%s>") % infile)

    addl_opts = {}
    if pth:
        addl_opts['pth'] = '%s' % pth
    if trim:
        addl_opts['trim'] = '%s' % trim
    if value_column:
        addl_opts['value_column'] = '%s' % value_column
    if vrange:
        addl_opts['vrange'] = '%s' % vrange
    if vscale:
        addl_opts['vscale'] = '%s' % vscale
    if ignore_broken:
        addl_opts['flags'] = 'i'

    if scan_only or shell_style:
        if shell_style:
            doShell = 'g'
        else:
            doShell = ''
        grass.run_command('r.in.xyz', flags='s' + doShell, input=infile,
                          output='dummy', sep=fs, x=x, y=y, z=z,
                          **addl_opts)
        sys.exit()

    if dtype == 'float':
        data_type = 'FCELL'
    else:
        data_type = 'DCELL'

    region = grass.region(region3d=True)

    if region['nsres'] != region['nsres3'] or region['ewres'] != region['ewres3']:
        grass.run_command('g.region', flags='3p')
        grass.fatal(_("The 2D and 3D region settings are different. Can not continue."))

    grass.verbose(_("Region bottom=%.15g  top=%.15g  vertical_cell_res=%.15g  (%d depths)")
                  % (region['b'], region['t'], region['tbres'], region['depths']))

    grass.verbose(_("Creating slices ..."))

    # to avoid a point which falls exactly on a top bound from being
    # considered twice, we shrink the upper bound of each slice by a tiny
    # epsilon (see the illustration after this example). For the top slice
    # we keep the full bound though, as someone scanning the bounds may have
    # set them exactly to the data extent (a bad idea, but it happens..)
    eps = 1.0e-15

    # if there are thousands of depths hopefully this array doesn't get too
    # large and so we don't have to worry much about storing/looping through
    # all the finished process infos.
    proc = {}
    pout = {}

    depths = list(range(1, 1 + region['depths']))

    for i in depths:
        tmp_layer_name = 'tmp.r3xyz.%d.%s' % (os.getpid(), '%05d' % i)

        zrange_min = region['b'] + (region['tbres'] * (i - 1))

        if i < region['depths']:
            zrange_max = region['b'] + (region['tbres'] * i) - eps
        else:
            zrange_max = region['b'] + (region['tbres'] * i)

        # spawn depth layer import job in the background
        #grass.debug("slice %d, <%s>  %% %d" % (band, image[band], band % workers))
        grass.message(_("Processing horizontal slice %d of %d [%.15g,%.15g) ...")
                      % (i, region['depths'], zrange_min, zrange_max))

        proc[i] = grass.start_command('r.in.xyz', input=infile, output=tmp_layer_name,
                                      sep=fs, method=method, x=x, y=y, z=z,
                                      percent=percent, type=data_type,
                                      zrange='%.15g,%.15g' % (zrange_min, zrange_max),
                                      **addl_opts)

        grass.debug("i=%d, %%=%d  (workers=%d)" % (i, i % workers, workers))
        # print sys.getsizeof(proc)  # sizeof(proc array)  [not so big]

        if i % workers == 0:
            # wait for the ones launched so far to finish
            for p_i in depths[:i]:
                pout[p_i] = proc[p_i].communicate()[0]
                if proc[p_i].wait() != 0:
                    grass.fatal(_("Trouble importing data. Aborting."))

    # wait for jobs to finish, collect any stray output
    for i in depths:
        pout[i] = proc[i].communicate()[0]
        if proc[i].wait() != 0:
            grass.fatal(_("Trouble importing data. Aborting."))

    del proc

    grass.verbose(_("Assembling 3D cube ..."))

    # input order: lowermost strata first
    slices = grass.read_command('g.list', type='raster', sep=',',
                                pattern='tmp.r3xyz.%d.*' % os.getpid()).rstrip(os.linesep)
    grass.debug(slices)

    try:
        grass.run_command('r.to.rast3', input=slices, output=output)
    except CalledModuleError:
        grass.fatal(_("Unable to assemble the 3D raster map <%s>") % output)
    grass.message(_("Done. 3D raster map <%s> created.") % output)
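A quick illustration, with hypothetical region values, of the slice bounds computed in the loop above: each depth slice i spans [b + tbres*(i-1), b + tbres*i), and the upper bound is shrunk by eps for every slice except the top one.

# hypothetical 3D region: bottom=0, top=50, tbres=10 -> 5 depths
b, tbres, depths, eps = 0.0, 10.0, 5, 1.0e-15
for i in range(1, depths + 1):
    zrange_min = b + tbres * (i - 1)
    zrange_max = b + tbres * i - (eps if i < depths else 0.0)
    print("slice %d: zrange=%.15g,%.15g" % (i, zrange_min, zrange_max))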
Example #26
0
def main():
    check_addon_installed('r.object.geometry', fatal=True)

    dev_start = options['development_start']
    dev_end = options['development_end']
    only_file = flags['l']
    patches_per_subregion = flags['s']
    if not only_file:
        repeat = int(options['repeat'])
        compactness_means = [float(each) for each in options['compactness_mean'].split(',')]
        compactness_ranges = [float(each) for each in options['compactness_range'].split(',')]
        discount_factors = [float(each) for each in options['discount_factor'].split(',')]
    patches_file = options['patch_sizes']
    threshold = float(options['patch_threshold'])
    sep = gutils.separator(options['separator'])
    # v.clean removes size <= threshold, we want to keep size == threshold
    threshold -= 1e-6

    # compute cell size
    region = gcore.region()
    res = (region['nsres'] + region['ewres']) / 2.
    coeff = float(gcore.parse_command('g.proj', flags='g')['meters'])
    cell_size = res * res * coeff * coeff
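    # res is in map units and coeff converts map units to meters, so cell_size
    # is the cell area in square meters (e.g. a 30 m cell in a metric CRS,
    # where coeff is 1, gives 30 * 30 * 1 * 1 = 900)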

    tmp_name = 'tmp_futures_calib_' + str(os.getpid()) + '_'
    global TMP

    orig_patch_diff = tmp_name + 'orig_patch_diff'
    TMP.append(orig_patch_diff)
    tmp_clump = tmp_name + 'tmp_clump'
    TMP.append(tmp_clump)
    if patches_per_subregion:
        tmp_cat_clump = tmp_name + 'tmp_cat_clump'
        TMP.append(tmp_cat_clump)

    gcore.message(_("Analyzing original patches..."))
    diff_development(dev_start, dev_end, options['subregions'], orig_patch_diff)
    data = write_data = patch_analysis(orig_patch_diff, threshold, tmp_clump)
    if patches_per_subregion:
        subregions_data = patch_analysis_per_subregion(orig_patch_diff, options['subregions'],
                                                       threshold, tmp_clump, tmp_cat_clump)
        # if there is just one subregion, write the previous overall analysis result
        if len(subregions_data.keys()) > 1:
            write_data = subregions_data
    write_patches_file(write_data, cell_size, patches_file, sep)

    if only_file:
        return

    area, perimeter = data.T
    compact = compactness(area, perimeter)

    # area histogram
    area = area / cell_size
    bin_width = 1.  # automatic ways to determine bin width do not perform well in this case
    hist_bins_area_orig = int(np.ptp(area) / bin_width)
    hist_range_area_orig = (np.min(area), np.max(area))
    histogram_area_orig, _edges = np.histogram(area, bins=hist_bins_area_orig,
                                               range=hist_range_area_orig, density=True)
    histogram_area_orig = histogram_area_orig * 100  # to get percentage for readability

    # compactness histogram
    bin_width = 0.1
    hist_bins_compactness_orig = int(np.ptp(compact) / bin_width)
    hist_range_compactness_orig = (np.min(compact), np.max(compact))
    histogram_compactness_orig, _edges = np.histogram(compact, bins=hist_bins_compactness_orig,
                                                      range=hist_range_compactness_orig, density=True)
    histogram_compactness_orig = histogram_compactness_orig * 100  # to get percentage for readability

    seed = int(options['random_seed'])
    nprocs = int(options['nprocs'])
    count = 0
    proc_count = 0
    queue_list = []
    proc_list = []
    num_all = len(compactness_means) * len(compactness_ranges) * len(discount_factors)
    with open(options['calibration_results'], 'w') as f:
        for com_mean in compactness_means:
            for com_range in compactness_ranges:
                for discount_factor in discount_factors:
                    count += 1
                    q = Queue()
                    p = Process(target=run_one_combination,
                                args=(count, num_all, repeat, seed, dev_start, com_mean, com_range,
                                      discount_factor, patches_file, options, threshold,
                                      hist_bins_area_orig, hist_range_area_orig, hist_bins_compactness_orig,
                                      hist_range_compactness_orig, cell_size, histogram_area_orig, histogram_compactness_orig,
                                      tmp_name, q))
                    p.start()
                    queue_list.append(q)
                    proc_list.append(p)
                    proc_count += 1
                    seed += 1
                    if proc_count == nprocs or count == num_all:
                        for i in range(proc_count):
                            proc_list[i].join()
                            data = queue_list[i].get()
                            if not data:
                                continue
                            f.write(','.join([str(data['input_discount_factor']), str(data['area_distance']),
                                              str(data['input_compactness_mean']), str(data['input_compactness_range']),
                                              str(data['compactness_distance'])]))
                            f.write('\n')
                        f.flush()
                        proc_count = 0
                        proc_list = []
                        queue_list = []
    # compute combined normalized error
    process_calibration(options['calibration_results'])
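The calibration loop above uses a simple batching pattern: start up to nprocs worker processes, each reporting back through its own Queue, then join and drain the whole batch before launching the next one. A stripped-down sketch of that pattern with a hypothetical worker (not the actual run_one_combination):

from multiprocessing import Process, Queue

def worker(x, q):
    # hypothetical stand-in for run_one_combination
    q.put(x * x)

if __name__ == '__main__':
    nprocs, jobs = 4, list(range(10))
    proc_list, queue_list, results = [], [], []
    for count, job in enumerate(jobs, start=1):
        q = Queue()
        p = Process(target=worker, args=(job, q))
        p.start()
        proc_list.append(p)
        queue_list.append(q)
        if len(proc_list) == nprocs or count == len(jobs):
            for p, q in zip(proc_list, queue_list):
                # drain the queue before join to avoid blocking on a full pipe
                results.append(q.get())
                p.join()
            proc_list, queue_list = [], []
    print(results)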
Example #27
0
def main():
    infile = options['input']
    output = options['output']
    method = options['method']
    dtype = options['type']
    fs = options['separator']
    x = options['x']
    y = options['y']
    z = options['z']
    value_column = options['value_column']
    vrange = options['vrange']
    vscale = options['vscale']
    percent = options['percent']
    pth = options['pth']
    trim = options['trim']
    workers = int(options['workers'])
    scan_only = flags['s']
    shell_style = flags['g']
    ignore_broken = flags['i']

    if workers == 1 and "WORKERS" in os.environ:
        workers = int(os.environ["WORKERS"])

    if not os.path.exists(infile):
        grass.fatal(_("Unable to read input file <%s>") % infile)

    addl_opts = {}
    if pth:
        addl_opts['pth'] = '%s' % pth
    if trim:
        addl_opts['trim'] = '%s' % trim
    if value_column:
        addl_opts['value_column'] = '%s' % value_column
    if vrange:
        addl_opts['vrange'] = '%s' % vrange
    if vscale:
        addl_opts['vscale'] = '%s' % vscale
    if ignore_broken:
        addl_opts['flags'] = 'i'

    if scan_only or shell_style:
        if shell_style:
            doShell = 'g'
        else:
            doShell = ''
        grass.run_command('r.in.xyz', flags='s' + doShell, input=infile,
                          output='dummy', sep=fs, x=x, y=y, z=z,
                          **addl_opts)
        sys.exit()

    if dtype == 'float':
        data_type = 'FCELL'
    else:
        data_type = 'DCELL'

    region = grass.region(region3d=True)

    if region['nsres'] != region['nsres3'] or region['ewres'] != region['ewres3']:
        grass.run_command('g.region', flags='3p')
        grass.fatal(_("The 2D and 3D region settings are different. Can not continue."))

    grass.verbose(_("Region bottom=%.15g  top=%.15g  vertical_cell_res=%.15g  (%d depths)")
                  % (region['b'], region['t'], region['tbres'], region['depths']))

    grass.verbose(_("Creating slices ..."))

    # to avoid a point which falls exactly on a top bound from being
    # considered twice, we shrink the upper bound of each slice by a tiny
    # epsilon. For the top slice we keep the full bound though, as someone
    # scanning the bounds may have set them exactly to the data extent
    # (a bad idea, but it happens..)
    eps = 1.0e-15

    # if there are thousands of depths hopefully this array doesn't get too
    # large and so we don't have to worry much about storing/looping through
    # all the finished process infos.
    proc = {}
    pout = {}

    depths = list(range(1, 1 + region['depths']))

    for i in depths:
        tmp_layer_name = 'tmp.r3xyz.%d.%s' % (os.getpid(), '%05d' % i)

        zrange_min = region['b'] + (region['tbres'] * (i - 1))

        if i < region['depths']:
            zrange_max = region['b'] + (region['tbres'] * i) - eps
        else:
            zrange_max = region['b'] + (region['tbres'] * i)

        # spawn depth layer import job in the background
        #grass.debug("slice %d, <%s>  %% %d" % (band, image[band], band % workers))
        grass.message(_("Processing horizontal slice %d of %d [%.15g,%.15g) ...")
                      % (i, region['depths'], zrange_min, zrange_max))

        proc[i] = grass.start_command('r.in.xyz', input=infile, output=tmp_layer_name,
                                      sep=fs, method=method, x=x, y=y, z=z,
                                      percent=percent, type=data_type,
                                      zrange='%.15g,%.15g' % (zrange_min, zrange_max),
                                      **addl_opts)

        grass.debug("i=%d, %%=%d  (workers=%d)" % (i, i % workers, workers))
        # print sys.getsizeof(proc)  # sizeof(proc array)  [not so big]

        if i % workers == 0:
            # wait for the ones launched so far to finish
            for p_i in depths[:i]:
                pout[p_i] = proc[p_i].communicate()[0]
                if proc[p_i].wait() != 0:
                    grass.fatal(_("Trouble importing data. Aborting."))

    # wait for jobs to finish, collect any stray output
    for i in depths:
        pout[i] = proc[i].communicate()[0]
        if proc[i].wait() != 0:
            grass.fatal(_("Trouble importing data. Aborting."))

    del proc

    grass.verbose(_("Assembling 3D cube ..."))

    # input order: lowermost strata first
    slices = grass.read_command('g.list', type='raster', sep=',',
                                pattern='tmp.r3xyz.%d.*' % os.getpid()).rstrip(os.linesep)
    grass.debug(slices)

    try:
        grass.run_command('r.to.rast3', input=slices, output=output)
    except CalledModuleError:
        grass.fatal(_("Unable to assemble the 3D raster map <%s>") % output)
    grass.message(_("Done. 3D raster map <%s> created.") % output)
Example #28
0
def main():
    infile = options["input"]
    output = options["output"]
    method = options["method"]
    dtype = options["type"]
    fs = options["separator"]
    x = options["x"]
    y = options["y"]
    z = options["z"]
    value_column = options["value_column"]
    vrange = options["vrange"]
    vscale = options["vscale"]
    percent = options["percent"]
    pth = options["pth"]
    trim = options["trim"]
    workers = int(options["workers"])
    scan_only = flags["s"]
    shell_style = flags["g"]
    ignore_broken = flags["i"]

    if workers == 1 and "WORKERS" in os.environ:
        workers = int(os.environ["WORKERS"])

    if not os.path.exists(infile):
        grass.fatal(_("Unable to read input file <%s>") % infile)

    addl_opts = {}
    if pth:
        addl_opts["pth"] = "%s" % pth
    if trim:
        addl_opts["trim"] = "%s" % trim
    if value_column:
        addl_opts["value_column"] = "%s" % value_column
    if vrange:
        addl_opts["vrange"] = "%s" % vrange
    if vscale:
        addl_opts["vscale"] = "%s" % vscale
    if ignore_broken:
        addl_opts["flags"] = "i"

    if scan_only or shell_style:
        if shell_style:
            doShell = "g"
        else:
            doShell = ""
        grass.run_command(
            "r.in.xyz",
            flags="s" + doShell,
            input=infile,
            output="dummy",
            sep=fs,
            x=x,
            y=y,
            z=z,
            **addl_opts,
        )
        sys.exit()

    if dtype == "float":
        data_type = "FCELL"
    else:
        data_type = "DCELL"

    region = grass.region(region3d=True)

    if region["nsres"] != region["nsres3"] or region["ewres"] != region[
            "ewres3"]:
        grass.run_command("g.region", flags="3p")
        grass.fatal(
            _("The 2D and 3D region settings are different. Can not continue.")
        )

    grass.verbose(
        _("Region bottom=%.15g  top=%.15g  vertical_cell_res=%.15g  (%d depths)"
          ) % (region["b"], region["t"], region["tbres"], region["depths"]))

    grass.verbose(_("Creating slices ..."))

    # to avoid a point which falls exactly on a top bound from being
    # considered twice, we shrink the upper bound of each slice by a tiny
    # epsilon. For the top slice we keep the full bound though, as someone
    # scanning the bounds may have set them exactly to the data extent
    # (a bad idea, but it happens..)
    eps = 1.0e-15

    # if there are thousands of depths hopefully this array doesn't get too
    # large and so we don't have to worry much about storing/looping through
    # all the finished process infos.
    proc = {}
    pout = {}

    depths = list(range(1, 1 + region["depths"]))

    for i in depths:
        tmp_layer_name = "tmp.r3xyz.%d.%s" % (os.getpid(), "%05d" % i)

        zrange_min = region["b"] + (region["tbres"] * (i - 1))

        if i < region["depths"]:
            zrange_max = region["b"] + (region["tbres"] * i) - eps
        else:
            zrange_max = region["b"] + (region["tbres"] * i)

        # spawn depth layer import job in the background
        # grass.debug("slice %d, <%s>  %% %d" % (band, image[band], band % workers))
        grass.message(
            _("Processing horizontal slice %d of %d [%.15g,%.15g) ...") %
            (i, region["depths"], zrange_min, zrange_max))

        proc[i] = grass.start_command(
            "r.in.xyz",
            input=infile,
            output=tmp_layer_name,
            sep=fs,
            method=method,
            x=x,
            y=y,
            z=z,
            percent=percent,
            type=data_type,
            zrange="%.15g,%.15g" % (zrange_min, zrange_max),
            **addl_opts,
        )

        grass.debug("i=%d, %%=%d  (workers=%d)" % (i, i % workers, workers))
        # print sys.getsizeof(proc)  # sizeof(proc array)  [not so big]

        if i % workers == 0:
            # wait for the ones launched so far to finish
            for p_i in depths[:i]:
                pout[p_i] = proc[p_i].communicate()[0]
                if proc[p_i].wait() != 0:
                    grass.fatal(_("Trouble importing data. Aborting."))

    # wait for jobs to finish, collect any stray output
    for i in depths:
        pout[i] = proc[i].communicate()[0]
        if proc[i].wait() != 0:
            grass.fatal(_("Trouble importing data. Aborting."))

    del proc

    grass.verbose(_("Assembling 3D cube ..."))

    # input order: lowermost strata first
    slices = grass.read_command("g.list",
                                type="raster",
                                sep=",",
                                pattern="tmp.r3xyz.%d.*" % os.getpid()).rstrip(
                                    os.linesep)
    grass.debug(slices)

    try:
        grass.run_command("r.to.rast3", input=slices, output=output)
    except CalledModuleError:
        grass.fatal(_("Unable to assemble the 3D raster map <%s>") % output)
    grass.message(_("Done. 3D raster map <%s> created.") % output)
Example #29
0
def main():
    # Take into account those extra pixels we'll be adding (the overlap)
    max_cols = int(options['maxcols']) - int(options['overlap'])
    max_rows = int(options['maxrows']) - int(options['overlap'])

    if max_cols == 0:
        gcore.fatal(
            _("It is not possible to set 'maxcols=%s' and "
              "'overlap=%s'. Please set maxcols>overlap" %
              (options['maxcols'], options['overlap'])))
    elif max_rows == 0:
        gcore.fatal(
            _("It is not possible to set 'maxrows=%s' and "
              "'overlap=%s'. Please set maxrows>overlap" %
              (options['maxrows'], options['overlap'])))
    # destination projection
    if not options['destproj']:
        dest_proj = gcore.read_command('g.proj', quiet=True,
                                       flags='jf').rstrip('\n')
        if not dest_proj:
            gcore.fatal(_('g.proj failed'))
    else:
        dest_proj = options['destproj']
    gcore.debug("Getting destination projection -> '%s'" % dest_proj)

    # projection scale
    if not options['destscale']:
        ret = gcore.parse_command('g.proj', quiet=True, flags='j')
        if not ret:
            gcore.fatal(_('g.proj failed'))

        if '+to_meter' in ret:
            dest_scale = ret['+to_meter'].strip()
        else:
            gcore.warning(
                _("Scale (%s) not found, assuming '1'") % '+to_meter')
            dest_scale = '1'
    else:
        dest_scale = options['destscale']
    gcore.debug('Getting destination projection scale -> %s' % dest_scale)

    # set up the projections
    srs_source = {
        'proj': options['sourceproj'],
        'scale': float(options['sourcescale'])
    }
    srs_dest = {'proj': dest_proj, 'scale': float(dest_scale)}

    if options['region']:
        gcore.run_command('g.region', quiet=True, region=options['region'])
    dest_bbox = gcore.region()
    gcore.debug('Getting destination region')

    # output field separator
    fs = separator(options['separator'])

    # project the destination region into the source:
    gcore.verbose('Projecting destination region into source...')
    dest_bbox_points = bboxToPoints(dest_bbox)

    dest_bbox_source_points, errors_dest = projectPoints(dest_bbox_points,
                                                         source=srs_dest,
                                                         dest=srs_source)

    if len(dest_bbox_source_points) == 0:
        gcore.fatal(
            _("There are no tiles available. Probably the output "
              "projection system is not compatible with the "
              "projection of the current location"))

    source_bbox = pointsToBbox(dest_bbox_source_points)

    gcore.verbose('Projecting source bounding box into destination...')

    source_bbox_points = bboxToPoints(source_bbox)

    source_bbox_dest_points, errors_source = projectPoints(source_bbox_points,
                                                           source=srs_source,
                                                           dest=srs_dest)

    x_metric = 1 / dest_bbox['ewres']
    y_metric = 1 / dest_bbox['nsres']
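    # i.e. cells per destination map unit (the reciprocal of the resolution);
    # these let the projected side lengths be expressed in numbers of cells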

    gcore.verbose('Computing length of sides of source bounding box...')

    source_bbox_dest_lengths = sideLengths(source_bbox_dest_points, x_metric,
                                           y_metric)

    # Find the skewness of the two directions.
    # Define it to be greater than one.
    # In the direction (x or y) in which the world is least skewed (i.e. north-south in lat/long),
    # divide the world into strips. These strips are as big as possible, constrained by max_cols/max_rows.
    # In the other direction do the same thing.
    # There's some recomputation of the size of the world that has to come in
    # here somewhere.

    # For now, however, we are going to go ahead and request more data than is necessary.
    # For small regions far from the critical areas of projections this makes very little difference
    # in the amount of data gotten.
    # We can make this efficient for big regions or regions near critical
    # points later.

    bigger = []
    bigger.append(max(source_bbox_dest_lengths['x']))
    bigger.append(max(source_bbox_dest_lengths['y']))
    maxdim = (max_cols, max_rows)

    # Compute the number and size of tiles to use in each direction.
    # I'm making fairly even-sized tiles; they differ from each other in
    # height and width only by one cell. To keep the numbers simple, this
    # extra cell is added to every tile.

    gcore.message(_('Computing tiling...'))
    tiles = [-1, -1]
    tile_base_size = [-1, -1]
    tiles_extra_1 = [-1, -1]
    tile_size = [-1, -1]
    tileset_size = [-1, -1]
    tile_size_overlap = [-1, -1]
    for i in range(len(bigger)):
        # make these into integers.
        # round up
        bigger[i] = int(bigger[i] + 1)
        tiles[i] = int((bigger[i] / maxdim[i]) + 1)
        tile_size[i] = tile_base_size[i] = int(bigger[i] / tiles[i])
        tiles_extra_1[i] = int(bigger[i] % tiles[i])
        # This is adding the extra pixel (remainder) to all of the tiles:
        if tiles_extra_1[i] > 0:
            tile_size[i] = tile_base_size[i] + 1
        tileset_size[i] = int(tile_size[i] * tiles[i])
        # Add overlap to tiles (doesn't affect tileset_size)
        tile_size_overlap[i] = tile_size[i] + int(options['overlap'])

    gcore.verbose("There will be %d by %d tiles each %d by %d cells" %
                  (tiles[0], tiles[1], tile_size[0], tile_size[1]))
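    # e.g. with hypothetical bigger = [2500, 1300] cells and maxdim = (1024, 1024):
    #   tiles = [3, 2], tile_base_size = [833, 650], tiles_extra_1 = [1, 0],
    #   so tile_size = [834, 650] and each requested tile is tile_size + overlap
    #   cells in each direction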

    ximax = tiles[0]
    yimax = tiles[1]

    min_x = source_bbox['w']
    min_y = source_bbox['s']
    max_x = source_bbox['e']
    max_y = source_bbox['n']
    span_x = (max_x - min_x)
    span_y = (max_y - min_y)

    xi = 0
    tile_bbox = {'w': -1, 's': -1, 'e': -1, 'n': -1}

    if errors_dest > 0:
        gcore.warning(
            _("During computation %i tiles could not be created" %
              errors_dest))

    while xi < ximax:
        tile_bbox['w'] = float(min_x) + (float(xi) * float(
            tile_size[0]) / float(tileset_size[0])) * float(span_x)
        tile_bbox['e'] = float(min_x) + (float(xi + 1) * float(
            tile_size_overlap[0]) / float(tileset_size[0])) * float(span_x)
        yi = 0
        while yi < yimax:
            tile_bbox['s'] = float(min_y) + (float(yi) * float(
                tile_size[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox['n'] = float(min_y) + (float(yi + 1) * float(
                tile_size_overlap[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox_points = bboxToPoints(tile_bbox)
            tile_dest_bbox_points, errors = projectPoints(tile_bbox_points,
                                                          source=srs_source,
                                                          dest=srs_dest)
            tile_dest_bbox = pointsToBbox(tile_dest_bbox_points)
            if bboxesIntersect(tile_dest_bbox, dest_bbox):
                if flags['w']:
                    print("bbox=%s,%s,%s,%s&width=%s&height=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                elif flags['g']:
                    print("w=%s;s=%s;e=%s;n=%s;cols=%s;rows=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                else:
                    print("%s%s%s%s%s%s%s%s%s%s%s" %
                          (tile_bbox['w'], fs, tile_bbox['s'], fs,
                           tile_bbox['e'], fs, tile_bbox['n'], fs,
                           tile_size_overlap[0], fs, tile_size_overlap[1]))
            yi += 1
        xi += 1
Example #30
0
def main():
    check_addon_installed("r.object.geometry", fatal=True)

    dev_start = options["development_start"]
    dev_end = options["development_end"]
    only_file = flags["l"]
    nprocs = int(options["nprocs"])
    patches_per_subregion = flags["s"]
    if not only_file:
        repeat = int(options["repeat"])
        compactness_means = [
            float(each) for each in options["compactness_mean"].split(",")
        ]
        compactness_ranges = [
            float(each) for each in options["compactness_range"].split(",")
        ]
        discount_factors = [
            float(each) for each in options["discount_factor"].split(",")
        ]
    patches_file = options["patch_sizes"]
    threshold = float(options["patch_threshold"])
    sep = gutils.separator(options["separator"])
    # v.clean removes size <= threshold, we want to keep size == threshold
    threshold -= 1e-6

    # compute cell size
    region = gcore.region()
    res = (region["nsres"] + region["ewres"]) / 2.0
    coeff = float(gcore.parse_command("g.proj", flags="g")["meters"])
    cell_size = res * res * coeff * coeff

    tmp_name = "tmp_futures_calib_" + str(os.getpid()) + "_"
    global TMP

    orig_patch_diff = tmp_name + "orig_patch_diff"
    TMP.append(orig_patch_diff)
    tmp_clump = tmp_name + "tmp_clump"
    TMP.append(tmp_clump)
    if patches_per_subregion:
        tmp_cat_clump = tmp_name + "tmp_cat_clump"
        TMP.append(tmp_cat_clump)

    gcore.message(_("Analyzing original patches..."))
    diff_development(dev_start, dev_end, options["subregions"],
                     orig_patch_diff)
    data = write_data = patch_analysis(orig_patch_diff, threshold, tmp_clump)
    if patches_per_subregion:
        subregions_data = patch_analysis_per_subregion_parallel(
            orig_patch_diff,
            options["subregions"],
            threshold,
            tmp_clump,
            tmp_name,
            nprocs,
        )
        # if there is just one subregion, write the previous overall analysis result
        if len(subregions_data.keys()) > 1:
            write_data = subregions_data
    write_patches_file(write_data, cell_size, patches_file, sep)

    if only_file:
        return

    area, perimeter = data.T
    compact = compactness(area, perimeter)

    # area histogram
    area = area / cell_size
    bin_width = (
        1.0  # automatic ways to determine bin width do not perform well in this case
    )
    hist_bins_area_orig = int(np.ptp(area) / bin_width)
    hist_range_area_orig = (np.min(area), np.max(area))
    histogram_area_orig, _edges = np.histogram(area,
                                               bins=hist_bins_area_orig,
                                               range=hist_range_area_orig,
                                               density=True)
    histogram_area_orig = histogram_area_orig * 100  # to get percentage for readability

    # compactness histogram
    bin_width = 0.1
    hist_bins_compactness_orig = int(np.ptp(compact) / bin_width)
    hist_range_compactness_orig = (np.min(compact), np.max(compact))
    histogram_compactness_orig, _edges = np.histogram(
        compact,
        bins=hist_bins_compactness_orig,
        range=hist_range_compactness_orig,
        density=True,
    )
    histogram_compactness_orig = (histogram_compactness_orig * 100
                                  )  # to get percentage for readability

    seed = int(options["random_seed"])
    count = 0
    proc_count = 0
    queue_list = []
    proc_list = []
    num_all = len(compactness_means) * len(compactness_ranges) * len(
        discount_factors)
    with open(options["calibration_results"], "w") as f:
        for com_mean in compactness_means:
            for com_range in compactness_ranges:
                for discount_factor in discount_factors:
                    count += 1
                    q = Queue()
                    p = Process(
                        target=run_one_combination,
                        args=(
                            count,
                            num_all,
                            repeat,
                            seed,
                            dev_start,
                            com_mean,
                            com_range,
                            discount_factor,
                            patches_file,
                            options,
                            threshold,
                            hist_bins_area_orig,
                            hist_range_area_orig,
                            hist_bins_compactness_orig,
                            hist_range_compactness_orig,
                            cell_size,
                            histogram_area_orig,
                            histogram_compactness_orig,
                            tmp_name,
                            q,
                        ),
                    )
                    p.start()
                    queue_list.append(q)
                    proc_list.append(p)
                    proc_count += 1
                    seed += 1
                    if proc_count == nprocs or count == num_all:
                        for i in range(proc_count):
                            proc_list[i].join()
                            data = queue_list[i].get()
                            if not data:
                                continue
                            f.write(",".join([
                                str(data["input_discount_factor"]),
                                str(data["area_distance"]),
                                str(data["input_compactness_mean"]),
                                str(data["input_compactness_range"]),
                                str(data["compactness_distance"]),
                            ]))
                            f.write("\n")
                        f.flush()
                        proc_count = 0
                        proc_list = []
                        queue_list = []
    # compute combined normalized error
    process_calibration(options["calibration_results"])