import copy
import sys

import grass.script as gscript
from grass.pygrass.modules import Module, ParallelModuleQueue


def main():

    #create a list for parallel mapcalc modules and a mapcalc module to act as template 
    mapcalc_list = []
    mapcalc = Module("r.mapcalc", overwrite=True, run_=False)
    
    #get number of tiles in each row and col from arguments 
    tile_rows = int(sys.argv[1])
    tile_cols = int(sys.argv[2])
    
    #Create queue for parallel processes
    queue = ParallelModuleQueue(nprocs=int(sys.argv[3]))
    
    #Use temporary region that will be reset after execution of this script
    gscript.use_temp_region()
    
    #Input raster (can be a GRASS raster dataset or an externally linked dataset such as a GeoTIFF or VRT)
    input = "input_raster"
    
    #Read raster boundaries and resolution into numeric variables
    info = gscript.raster_info(input)
    no = float(info['north'])
    so = float(info['south'])
    we = float(info['west'])
    ea = float(info['east'])
    ro = int(info['rows'])
    co = int(info['cols'])
    ewr = float(info['ewres'])
    nsr = float(info['nsres'])

    #Start mapcalc module for each tile
    k = 0
    for i in range(tile_rows):
        for j in range(tile_cols):
            #Set processing region to specific part of the raster (column+row)
            gscript.run_command('g.region',
                    n=so+(i+1)*(ro//tile_rows)*nsr,
                    s=so+i*(ro//tile_rows)*nsr,
                    e=we+(1+j)*(co//tile_cols)*ewr,
                    w=we+j*(co//tile_cols)*ewr,
                    rows=ro//tile_rows,
                    cols=co//tile_cols,
                    nsres=nsr,
                    ewres=ewr)
            #Create a copy of the mapcalc template, give it a mapcalc expression and put it into the parallel queue, where it will be executed when a process becomes available.
            new_mapcalc = copy.deepcopy(mapcalc)
            mapcalc_list.append(new_mapcalc)
            m = new_mapcalc(expression="test_pygrass_%i = %s * (%i+1)" % (k, input, k))
            queue.put(m)
            k += 1
    #wait for all mapcalc modules to have finished execution
    queue.wait()

    #print mapcalc returncodes to check that everything went as expected
    for mapcalc in mapcalc_list:
        print(mapcalc.popen.returncode)

    #delete temporary region to restore the region that was in use at the start of the script
    gscript.del_temp_region()    
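
The example above follows the canonical ParallelModuleQueue pattern: build one Module template with run_=False, deepcopy it once per task, set the task-specific parameters on the copy, and let the queue schedule the copies. A minimal sketch of just that pattern (the demo_* output names are hypothetical):

import copy

from grass.pygrass.modules import Module, ParallelModuleQueue

queue = ParallelModuleQueue(nprocs=4)
template = Module("r.mapcalc", overwrite=True, run_=False)
for k in range(4):
    # each copy gets its own expression before being queued
    task = copy.deepcopy(template)
    queue.put(task(expression="demo_%d = %d" % (k, k)))
queue.wait()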
Example #2
def import_files(directory):
    start = time.time()

    # queue for parallel jobs
    queue = ParallelModuleQueue(int(options['nprocs']))

    import_module = Module('v.in.lidar',
                           flags='otb',
                           overwrite=True,
                           run_=False)

    maps = []
    for f in os.listdir(directory):
        if os.path.splitext(f)[1] != '.laz':
            continue
        fullname = os.path.join(directory, f)
        basename = os.path.basename(f)
        # '-' is not valid for vector map names
        # vector map names cannot start with number
        mapname = os.path.splitext(basename)[0].replace('-', '_')
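        # names starting with a digit are invalid too; a hypothetical guard:
        if mapname[0].isdigit():
            mapname = 'v' + mapname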

        maps.append(mapname)
        gs.message("Importing <{}>...".format(fullname))
        import_task = deepcopy(import_module)
        queue.put(import_task(input=fullname, output=mapname))

    queue.wait()

    if not maps:
        gs.fatal("No input files found")

    return maps
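
To check that each import succeeded, the queue keeps its finished Module objects; a small sketch of such a check (an assumed extension, using pygrass's get_finished_modules):

for mod in queue.get_finished_modules():
    if mod.popen.returncode != 0:
        gs.warning("Import failed: {}".format(mod.get_bash()))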
Example #3
def get_rst_csv(rasters,
                zones,
                csvfiles,
                percentile=90.0,
                overwrite=False,
                nprocs=1,
                separator=";"):
    queue = ParallelModuleQueue(nprocs=nprocs)
    for rast, csv in zip(rasters, csvfiles):
        print(rast, csv)
        queue.put(
            Module(
                "r.univar2",
                map=rast,
                zones=zones,
                percentile=percentile,
                output=csv,
                separator=separator,
                overwrite=overwrite,
                flags="et",
                run_=False,
            ))
    # wait for all processes to finish
    queue.wait()
    return csvfiles
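
A hypothetical invocation, assuming the input rasters, the zone map, and the output paths already exist:

csvs = get_rst_csv(rasters=['elev_2019', 'elev_2020'],          # hypothetical maps
                   zones='basins',                              # hypothetical zone raster
                   csvfiles=['elev_2019.csv', 'elev_2020.csv'],
                   nprocs=2)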
Example #4
    def parallel_process(self, points):
        # Run the viewshed for a few points
        vshed_list = []
        # Set up a processing queue
        queue = ParallelModuleQueue(nprocs=self.ncores)

        # Create a PyGRASS Module to run the viewshed calc
        viewshed_calc = Module('r.viewshed',
                               overwrite=True,
                               run_=False,
                               observer_elevation=self.OBS_ELEVATION,
                               memory=self.MEM_MBYTES,
                               refraction_coeff=self.REFRACTION_COEF,
                               input=self.elevation_name,
                               quiet=True)
        stime = time.time()
        gscript.message('Queueing %s viewpoint calculations...' % len(points))
        for site in points:
            ptname = site[2]
            tmpdir = os.path.join(os.environ['TMPDIR'], 'viewshed_%s' % ptname)
            if not os.path.exists(tmpdir):
                os.makedirs(tmpdir)
            # gscript.verbose(_('Calculating viewshed for location %s,%s (point name = %s)') % (site[0], site[1], ptname))
            tempry = "vshed_%s" % ptname
            vshed_list.append(tempry)
            new_viewshed_calc = copy.deepcopy(viewshed_calc)
            vs = new_viewshed_calc(output=tempry,
                                   coordinates=(site[0], site[1]),
                                   directory=tmpdir)
            queue.put(vs)
        queue.wait()
        etime = time.time()
        self.print_timing(stime, etime, len(points))
        return vshed_list
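
The loop reads each viewpoint as an (x, y, name) sequence (site[0], site[1], site[2]), so a hypothetical call could look like this, where obj is an instance of the enclosing (unshown) class:

points = [(634000.0, 224500.0, 'p1'), (635250.0, 226100.0, 'p2')]  # made-up sites
vshed_rasters = obj.parallel_process(points)  # -> ['vshed_p1', 'vshed_p2']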
Example #6
def main(elevation):
    # compute slope and aspect for r.sun
    Module('r.slope.aspect', elevation=elevation, aspect='aspect', slope='slope', overwrite=True)
    # initialize an empty queue and list
    queue = ParallelModuleQueue(nprocs=44)
    sun_name = 'sun_day{}_t{}'
    # set computational region
    Module('g.region', raster=elevation)
    # initialize a module instance with shared inputs
    sun = Module('r.sun.mp', elevation=elevation, slope='slope', aspect='aspect',
                 glob_rad='glob', step=1, threads=16, run_=False, overwrite=True)
    for t in range(5, 20):
        for d in range(1, 366):
            print(d)
            # create a copy of the module and set the remaining parameters
            m = deepcopy(sun)(glob_rad=sun_name.format(d, t), day=d, time=t)
            queue.put(m)
        queue.wait()
    # export each result to GeoTIFF (r.out.gdal writes one raster at a time)
    for t in range(5, 20):
        name = sun_name.format(d, t)  # d holds the last day from the loop above
        Module('r.out.gdal', createopt="COMPRESS=LZW", input=name,
               output='/mnt/tyson_swetnam/Data/goldwater/{}.tif'.format(name),
               format='GTiff', overwrite=True)
    # set color table
    Module('r.colors', map=[sun_name.format(d, t) for t in range(5, 20)], color='grey', flags='e')
Example #7
def union_for_all_pairs(inputList):
    """
    Calculates the geometric union of the overlayed polygon layers 
    for all pairs in inputList
    
    THis is not a good idea! It is only an example!
    """

    import copy
    from grass.pygrass.modules import Module, ParallelModuleQueue

    op_list = []

    unionTool = Module("v.overlay",
                       atype="area",
                       btype="area",
                       operator="or",
                       overwrite=True,
                       run_=False,
                       quiet=True)

    qq = ParallelModuleQueue(nprocs=5)
    outputs = []
    for lyr_a, lyr_b in inputList:
        new_union = copy.deepcopy(unionTool)
        op_list.append(new_union)

        un_result = "{}_{}".format(lyr_a, lyr_b)
        nu = new_union(ainput=lyr_a, binput=lyr_b, output=un_result)

        qq.put(nu)
        outputs.append(un_result)

    qq.wait()

    return outputs
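
A hypothetical way to build inputList for all layer pairs, using itertools.combinations:

from itertools import combinations

layers = ['parcels', 'soils', 'zoning']  # hypothetical vector maps
inputList = list(combinations(layers, 2))
results = union_for_all_pairs(inputList)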
Example #8
def main():

    settings = options["settings"]
    scene_names = options["scene_name"].split(",")
    output = options["output"]
    nprocs = int(options["nprocs"])
    clouds = int(options["clouds"])
    producttype = options["producttype"]
    start = options["start"]
    end = options["end"]
    datasource = options["datasource"]
    use_scenenames = flags["s"]
    ind_folder = flags["f"]

    if datasource == "USGS_EE" and producttype != "S2MSI1C":
        grass.fatal(
            _("Download from USGS Earth Explorer only supports "
              "Sentinel-2 Level 1C data (S2MSI1C)"))
    elif datasource == "GCS" and producttype not in ["S2MSI2A", "S2MSI1C"]:
        grass.fatal(
            _("Download from GCS only supports Sentinel-2 Level "
              "1C (S2MSI1C) or 2A (S2MSI2A)"))

    # check if we have the i.sentinel.download + i.sentinel.import addons
    if not grass.find_program("i.sentinel.download", "--help"):
        grass.fatal(
            _("The 'i.sentinel.download' module was not found, "
              "install it first: \n g.extension i.sentinel"))

    # Test if all required data are there
    if not os.path.isfile(settings):
        grass.fatal(_("Settings file <{}> not found").format(settings))

    # set some common environmental variables, like:
    os.environ.update(
        dict(
            GRASS_COMPRESS_NULLS="1",
            GRASS_COMPRESSOR="ZSTD",
            GRASS_MESSAGE_FORMAT="plain",
        ))

    # test nprocs Settings
    if nprocs > mp.cpu_count():
        grass.warning(
            _("Using {} parallel processes but only {} CPUs available. "
              "Setting nprocs to {}").format(nprocs, mp.cpu_count(),
                                             mp.cpu_count() - 1))
        nprocs = mp.cpu_count() - 1

    # sentinelsat allows only three parallel downloads
    elif nprocs > 2 and options["datasource"] == "ESA_COAH":
        grass.message(
            _("Maximum number of parallel processes for downloading"
              " fixed to 2 due to sentinelsat API restrictions"))
        nprocs = 2

    # usgs allows maximum 10 parallel downloads
    elif nprocs > 10 and options["datasource"] == "USGS_EE":
        grass.message(
            _("Maximum number of parallel processes for downloading"
              " fixed to 10 due to Earth Explorer restrictions"))
        nprocs = 10

    if use_scenenames:
        scenenames = scene_names
        # check if the filename is valid
        # usgs scenename format will be checked in i.sentinel.download
        if datasource == "ESA_COAH":
            for scene in scenenames:
                if len(scene) < 10 or not scene.startswith("S2"):
                    grass.fatal(
                        _("Please provide scenenames in the format"
                          " S2X_LLLLLL_YYYYMMDDTHHMMSS_"
                          "NYYYY_RZZZ_TUUUUU_YYYYMMDDTHHMMSS.SAFE"))
    else:
        # get a list of scenenames to download
        download_args = {
            "settings": settings,
            "producttype": producttype,
            "start": start,
            "end": end,
            "clouds": clouds,
            "datasource": datasource,
            "flags": "l",
        }
        if options["limit"]:
            download_args["limit"] = options["limit"]
        i_sentinel_download_string = grass.parse_command(
            "i.sentinel.download", **download_args)
        i_sentinel_keys = i_sentinel_download_string.keys()
        scenenames = [item.split(" ")[1] for item in i_sentinel_keys]
    # parallelize download
    grass.message(_("Downloading Sentinel-2 data..."))

    # adapt nprocs to number of scenes
    nprocs_final = min(len(scenenames), nprocs)
    queue_download = ParallelModuleQueue(nprocs=nprocs_final)
    for idx, scenename in enumerate(scenenames):
        producttype, start_date, end_date, query_string = scenename_split(
            scenename, datasource, flags["e"])
        # output into separate folders, easier to import in a parallel way:
        if ind_folder:
            outpath = os.path.join(output, "dl_s2_%s" % str(idx + 1))
        else:
            outpath = output
        i_sentinel_download = Module(
            "i.sentinel.download",
            settings=settings,
            start=start_date,
            end=end_date,
            producttype=producttype,
            query=query_string,
            output=outpath,
            datasource=datasource,
            run_=False,
        )
        queue_download.put(i_sentinel_download)
    queue_download.wait()
Example #9
def main():

    # Inspiration: https://medium.com/sentinel-hub/how-to-create-cloudless-mosaics-37910a2b8fa8

    global rm_regions, rm_rasters, rm_vectors, rm_strds
    global name, scene_keys

    # parameters
    strds = options['input']
    clouds = options['clouds']
    shadows = options['shadows']
    strdsout = options['output']
    nprocs = int(options['nprocs'])

    test_nprocs()

    # test if necessary GRASS GIS addons are installed
    if not grass.find_program('r.series.lwr', '--help'):
        grass.fatal(
            _("The 'r.series.lwr' module was not found, install it first:") +
            "\n" + "g.extension r.series.lwr")
    if not grass.find_program('i.histo.match', '--help'):
        grass.fatal(
            _("The 'i.histo.match' module was not found, install it first:") +
            "\n" + "g.extension i.histo.match")

    strdsrasters = [
        x.split('|')[0]
        for x in grass.parse_command('t.rast.list', input=strds, flags='u')
    ]
    strdstimes = [
        x.split('|')[2]
        for x in grass.parse_command('t.rast.list', input=strds, flags='u')
    ]
    if clouds:
        cloudrasters = [
            x.split('|')[0] for x in grass.parse_command(
                't.rast.list', input=clouds, flags='u')
        ]
        cloudtimes = [
            x.split('|')[2] for x in grass.parse_command(
                't.rast.list', input=clouds, flags='u')
        ]
        if len(strdsrasters) != len(cloudrasters):
            grass.warning(
                _("Number of rasters in <input> strds and <clouds> strds is not the same."
                  ))
    if shadows:
        shadowrasters = [
            x.split('|')[0] for x in grass.parse_command(
                't.rast.list', input=shadows, flags='u')
        ]
        shadowtimes = [
            x.split('|')[2] for x in grass.parse_command(
                't.rast.list', input=shadows, flags='u')
        ]
        if len(strdsrasters) != len(shadowrasters):
            grass.warning(
                _("Number of rasters in <input> strds and <shadows> strds is not the same."
                  ))

    scenes = dict()
    for strdsrast, strdstime in zip(strdsrasters, strdstimes):
        scenes[strdsrast] = {'raster': strdsrast, 'date': strdstime}
        if clouds:
            if strdstime in cloudtimes:
                cloud_idx = cloudtimes.index(strdstime)
                scenes[strdsrast]['clouds'] = cloudrasters[cloud_idx]
            else:
                grass.warning(
                    _("For <%s> at <%s> no clouds found") %
                    (strdsrast, strdstime))
        if shadows:
            if strdstime in shadowtimes:
                shadow_idx = shadowtimes.index(strdstime)
                scenes[strdsrast]['shadows'] = shadowrasters[shadow_idx]
            else:
                grass.warning(
                    _("For <%s> at <%s> no shadows found") %
                    (strdsrast, strdstime))
    scene_keys = 'raster'

    num_scenes = len(scenes)
    if options['clouds']:
        # parallelize
        queue_mapcalc = ParallelModuleQueue(nprocs=nprocs)
        scene_keys = 'noclouds'
        grass.message(_("Set clouds in rasters to null() ..."))
        for scene_key, num in zip(scenes, range(num_scenes)):
            grass.message(_("Scene %d of %d ...") % (num + 1, num_scenes))
            scene = scenes[scene_key]
            if options['cloudbuffer']:
                noclouds = "%s_nocloudstmp" % scene['raster']
            else:
                noclouds = "%s_noclouds" % scene['raster']
            scenes[scene_key]['noclouds'] = noclouds
            rm_rasters.append(noclouds)
            expression = ("%s = if( isnull(%s), %s, null() )" %
                          (noclouds, scene['clouds'], scene['raster']))

            # grass.run_command('r.mapcalc', expression=expression, quiet=True)
            module_mapcalc = Module('r.mapcalc',
                                    expression=expression,
                                    run_=False)
            queue_mapcalc.put(module_mapcalc)
        queue_mapcalc.wait()

        # buffer
        if options['cloudbuffer']:
            # parallelize
            queue_buffer = ParallelModuleQueue(nprocs=nprocs)
            for scene_key, num in zip(scenes, range(num_scenes)):
                grass.message(
                    _("Cloud buffer %d of %d ...") % (num + 1, num_scenes))
                scene = scenes[scene_key]
                noclouds = scenes[scene_key]['noclouds']
                noclouds_buf = "%s_noclouds" % scene['raster']
                scenes[scene_key]['noclouds'] = noclouds_buf
                rm_rasters.append(noclouds_buf)
                if float(options['cloudbuffer']) < 0:
                    buffer = float(options['cloudbuffer'])
                else:
                    buffer = -1.0 * float(options['cloudbuffer'])

                # grass.run_command('r.grow', input=noclouds, output=noclouds_buf, radius=buffer, quiet=True)
                module_buffer = Module('r.grow',
                                       input=noclouds,
                                       output=noclouds_buf,
                                       radius=buffer,
                                       run_=False)
                queue_buffer.put(module_buffer)
            queue_buffer.wait()

    if options['shadows']:
        # parallelize
        queue_mapcalc = ParallelModuleQueue(nprocs=nprocs)
        old_key = scene_keys
        scene_keys = 'noshadows'
        grass.message(_("Set shadows in rasters to null() ..."))
        for scene_key, num in zip(scenes, range(num_scenes)):
            grass.message(_("Scene %d of %d ...") % (num + 1, num_scenes))
            scene = scenes[scene_key]
            if options['shadowbuffer']:
                noshadows = "%s_noshadowtmp" % scene['raster']
            else:
                noshadows = "%s_noshadows" % scene['raster']
            rm_rasters.append(noshadows)
            scenes[scene_key]['noshadows'] = noshadows
            expression = ("%s = if( isnull(%s), %s, null() )" %
                          (noshadows, scene['shadows'], scene[old_key]))
            # grass.run_command('r.mapcalc', expression=expression, quiet=True)
            module_mapcalc = Module('r.mapcalc',
                                    expression=expression,
                                    run_=False)
            queue_mapcalc.put(module_mapcalc)
        queue_mapcalc.wait()

        # buffer
        if options['shadowbuffer']:
            # parallelize
            queue_buffer = ParallelModuleQueue(nprocs=nprocs)
            for scene_key, num in zip(scenes, range(num_scenes)):
                grass.message(
                    _("Shadow buffer %d of %d ...") % (num + 1, num_scenes))
                scene = scenes[scene_key]
                noshadows = scenes[scene_key]['noshadows']
                noshadows_buf = "%s_noshadows" % scene['raster']
                scenes[scene_key]['noshadows'] = noshadows_buf
                rm_rasters.append(
                    noshadows_buf
                )  # TODO: this does not seem to work?!
                if float(options['shadowbuffer']) < 0:
                    buffer = float(options['shadowbuffer'])
                else:
                    buffer = -1.0 * float(options['shadowbuffer'])

                # grass.run_command('r.grow', input=noshadows, output=noshadows_buf, radius=buffer, quiet=True)
                module_buffer = Module('r.grow',
                                       input=noshadows,
                                       output=noshadows_buf,
                                       radius=buffer,
                                       run_=False)
                queue_buffer.put(module_buffer)
            queue_buffer.wait()

    # histogram matching for ALL input scenes at once!
    # add histogram matching as an option to t.rast.aggregate
    # to be applied to the sets of maps to be aggregated
    grass.message(_("Compute histogram matching ..."))
    nocloudnoshadows_rasters = [val[scene_keys] for key, val in scenes.items()]
    grass.run_command('i.histo.match',
                      input=nocloudnoshadows_rasters,
                      suffix='match',
                      max=options['max'],
                      quiet=True)
    newscenekey = "%s.match" % scene_keys
    for scene_key in scenes:
        scenes[scene_key][
            newscenekey] = scenes[scene_key][scene_keys] + '.match'
        rm_rasters.append(scenes[scene_key][newscenekey])
    scene_keys = newscenekey

    nocloudnoshadows_rasters = [val[scene_keys] for key, val in scenes.items()]

    if flags['q']:
        grass.message(_("Compute quart1 for each pixel over time ..."))
        quart1 = "tmp_quart1_%s" % os.getpid()
        rm_rasters.append(quart1)
        grass.run_command('r.series',
                          input=nocloudnoshadows_rasters,
                          output=quart1,
                          method='quart1')

    grass.message(_("Compute median for each pixel over time ..."))
    median = "tmp_median_%s" % os.getpid()
    rm_rasters.append(median)
    grass.run_command('r.series',
                      input=nocloudnoshadows_rasters,
                      output=median,
                      method='median')

    grass.message(_("Compute approximations for each pixel over time ..."))
    lwr_param = [
        {'order': 2, 'dod': 5},
        {'order': 2, 'dod': 4},
        {'order': 2, 'dod': 3},
        {'order': 2, 'dod': 2},
        {'order': 2, 'dod': 1},
        {'order': 1, 'dod': 5},
        {'order': 1, 'dod': 4},
        {'order': 1, 'dod': 3},
        {'order': 1, 'dod': 2},
        {'order': 1, 'dod': 1},
    ]
    lwr_suffix_list = []
    # parallelize
    queue_lwr = ParallelModuleQueue(nprocs=nprocs)
    for lwr_p in lwr_param:
        if len(nocloudnoshadows_rasters) > (lwr_p['order'] + lwr_p['dod']):
            grass.message("Approximation with order %d and dod %s ..." %
                          (lwr_p['order'], lwr_p['dod']))
            lwr = "tmp_%s_lwr_o%d_d%d" % (os.getpid(), lwr_p['order'],
                                          lwr_p['dod'])
            lwr_suffix_list.append(lwr)

            # grass.run_command('r.series.lwr', input=nocloudnoshadows_rasters, suffix=lwr, flags='i',**lwr_p, quiet=True)
            module_lwr = Module('r.series.lwr',
                                input=nocloudnoshadows_rasters,
                                suffix=lwr,
                                flags='i',
                                **lwr_p,
                                run_=False)
            queue_lwr.put(module_lwr)
    queue_lwr.wait()

    for lwr in lwr_suffix_list:
        lwr_raster_list = [
            x for x in grass.parse_command(
                'g.list', type='raster', pattern="*%s" % lwr)
        ]
        rm_rasters.extend(lwr_raster_list)

    grass.message(_("Patching each scene ..."))
    patched_rasters_list = []
    # parallelize
    queue_patch = ParallelModuleQueue(nprocs=nprocs)
    for scene_key, num in zip(scenes, range(num_scenes)):
        grass.message(_("Scene %d of %d ...") % (num + 1, num_scenes))
        scene = scenes[scene_key]
        patch_list = [scene[scene_keys]]
        name = scene[scene_keys]
        patch_list.extend(["%s%s" % (name, x) for x in lwr_suffix_list])
        if flags['q']:
            patch_list.append(quart1)
        patch_list.append(median)
        patched = name.replace('_%s' % scene_keys, strdsout)
        # grass.run_command('r.patch', input=patch_list, output=patched)
        scenes[scene_key]['patched'] = patched
        patched_rasters_list.append(patched)
        module_patch = Module('r.patch',
                              input=patch_list,
                              output=patched,
                              run_=False)
        queue_patch.put(module_patch)
    queue_patch.wait()

    grass.message(_("Compute PATCHED count for each pixel over time ..."))
    count = "tmp_PATCHEDcount_%s" % os.getpid()
    rm_rasters.append(count)
    grass.run_command('r.series',
                      input=patched_rasters_list,
                      output=count,
                      method='count')
    r_univar = grass.parse_command('r.univar', map=count, flags='g')
    if int(r_univar['min']) < int(r_univar['max']):
        grass.warning(_("Not all gaps are closed"))
        # TODO fill gaps?

    grass.message(
        _("Create strds <%s> for patched filled scenes ...") % strdsout)
    input_info = grass.parse_command('t.info', input=strds)
    title, desc = False, False
    title_str, desc_str = None, None
    for key in input_info:
        if 'Title:' in key:
            title = True
        elif title:
            title = False
            if 'Description:' not in key:
                title_str = key.replace('|', '').strip()
        elif 'Description:' in key:
            desc = True
        elif desc:
            desc = False
            if 'Command history:' not in key:
                desc_str = key.replace('|', '').strip()
    grass.run_command('t.create',
                      output=strdsout + '_tmp',
                      title=("%s gaps(/clouds/shadows) filled" %
                             title_str) if title_str else "mosaic",
                      desc=("%s: gaps(/clouds/shadows) filled" %
                            desc_str) if desc_str else
                      ("%s: gaps(/clouds/shadows) filled" %
                       title_str) if title_str else "mosaic",
                      quiet=True)
    rm_strds.append(strdsout + '_tmp')

    # create register file
    registerfile = grass.tempfile()
    file = open(registerfile, 'w')
    for scene_key in scenes:
        scene = scenes[scene_key]
        file.write("%s|%s\n" % (scene['patched'], scene['date']))
        rm_rasters.append(scene['patched'])
    file.close()
    grass.run_command('t.register',
                      input=strdsout + '_tmp',
                      file=registerfile,
                      quiet=True)
    # remove registerfile
    grass.try_remove(registerfile)

    grass.message(
        _("Mosaicing the scenes with method %s ...") % options['method'])
    if not options['granularity'] == 'all':
        grass.run_command('t.rast.aggregate',
                          input=strdsout + '_tmp',
                          output=strdsout,
                          basename=strdsout,
                          granularity=options['granularity'],
                          method=options['method'],
                          quiet=True,
                          nprocs=options['nprocs'])
    else:
        rasters = [
            x.split('|')[0] for x in grass.parse_command(
                't.rast.list', input=strdsout + '_tmp', flags='u')
        ]
        grass.run_command('r.series',
                          input=rasters,
                          output=strdsout,
                          method=options['method'])
        grass.message(_("<%s> created") % strdsout)
Example #10
def main():
    # User specified variables
    dem = options['elevation']
    neighborhood_size = options['size']
    OutRaster = options['output']
    notparallel = flags['p']

    # Internal raster map names (random suffix avoids name collisions)
    def tmp_map_name(prefix):
        suffix = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(8)])
        return prefix + suffix

    SlopeRaster = tmp_map_name('tmpSlope_')
    AspectRaster = tmp_map_name('tmpAspect_')
    xyRaster = tmp_map_name('tmpxyRaster_')
    zRaster = tmp_map_name('tmpzRaster_')
    xRaster = tmp_map_name('tmpxRaster_')
    yRaster = tmp_map_name('tmpyRaster_')
    xSumRaster = tmp_map_name('tmpxSumRaster_')
    ySumRaster = tmp_map_name('tmpySumRaster_')
    zSumRaster = tmp_map_name('tmpzSumRaster_')

    tmp_rast.extend([SlopeRaster, AspectRaster, xyRaster, zRaster, xRaster,
                     yRaster, xSumRaster, ySumRaster, zSumRaster])

    # Create Slope and Aspect rasters
    grass.message("Calculating slope and aspect...")
    grass.run_command("r.slope.aspect",
                      elevation = dem,
                      slope = SlopeRaster,
                      aspect = AspectRaster,
                      format = "degrees",
                      precision = "FCELL",
                      zscale = 1.0,
                      min_slope = 0.0,
                      quiet = True)

    # Calculate x, y, and z rasters
    # Note - GRASS sin/cos functions expect degrees, while ArcGIS expects an input grid in radians
    # No need to convert slope and aspect to radians as in the original ArcGIS script

    if not notparallel:
        # parallel version
        grass.message("Calculating x, y, and z rasters...")

        # calculate xy and z rasters using two parallel processes
        mapcalc_list = []
        mapcalc = Module("r.mapcalc", run_=False)
        queue = ParallelModuleQueue(nprocs=2)

        mapcalc1 = copy.deepcopy(mapcalc)
        mapcalc_list.append(mapcalc1)
        m = mapcalc1(expression='{x} = float(sin({a}))'.format(x=xyRaster, a=SlopeRaster))
        queue.put(m)

        mapcalc2 = copy.deepcopy(mapcalc)
        mapcalc_list.append(mapcalc2)
        m = mapcalc2(expression='{x} = float(cos({a}))'.format(x=zRaster, a=SlopeRaster))
        queue.put(m)

        queue.wait()

        # calculate x and y rasters using two parallel processes
        mapcalc_list = []
        mapcalc = Module("r.mapcalc", run_=False)
        queue = ParallelModuleQueue(nprocs=2)

        mapcalc1 = copy.deepcopy(mapcalc)
        mapcalc_list.append(mapcalc1)
        m = mapcalc1(expression='{x} = float(sin({a}) * {b})'.format(x=xRaster, a=AspectRaster, b=xyRaster))
        queue.put(m)

        mapcalc2 = copy.deepcopy(mapcalc)
        mapcalc_list.append(mapcalc2)
        m = mapcalc2(expression='{x} = float(cos({a}) * {b})'.format(x=yRaster, a=AspectRaster, b=xyRaster))
        queue.put(m)

        queue.wait()
    else:
        grass.mapcalc('{x} = float(sin({a}))'.format(x=xyRaster, a=SlopeRaster))
        grass.mapcalc('{x} = float(cos({a}))'.format(x=zRaster, a=SlopeRaster))
        grass.mapcalc('{x} = float(sin({a}) * {b})'.format(x=xRaster, a=AspectRaster, b=xyRaster))
        grass.mapcalc('{x} = float(cos({a}) * {b})'.format(x=yRaster, a=AspectRaster, b=xyRaster))

    # Calculate sums of x, y, and z rasters for selected neighborhood size

    if not notparallel:
        # parallel version using three parallel processes
        grass.message("Calculating sums of x, y, and z rasters in selected neighborhood...")

        n_list = []
        neighbors = Module("r.neighbors", overwrite=True, run_=False)
        queue = ParallelModuleQueue(nprocs=3)

        n1 = copy.deepcopy(neighbors)
        n_list.append(n1)
        n1(input=xRaster, output=xSumRaster, method="average", size=neighborhood_size)
        queue.put(n1)

        n2 = copy.deepcopy(neighbors)
        n_list.append(n2)
        n2(input=yRaster, output=ySumRaster, method="average", size=neighborhood_size)
        queue.put(n2)

        n3 = copy.deepcopy(neighbors)
        n_list.append(n3)
        n3(input=zRaster, output=zSumRaster, method="average", size=neighborhood_size)
        queue.put(n3)

        queue.wait()
    else:
        grass.run_command("r.neighbors", input = xRaster, output = xSumRaster, method = "average", size = neighborhood_size)
        grass.run_command("r.neighbors", input = yRaster, output = ySumRaster, method = "average", size = neighborhood_size)
        grass.run_command("r.neighbors", input = zRaster, output = zSumRaster, method = "average", size = neighborhood_size)

    # Calculate the resultant vector and final ruggedness raster
    # Modified from the original script to multiply each SumRaster by the number of neighborhood cells to get the sum
    grass.message("Calculating the final ruggedness raster...")
    maxValue = int(neighborhood_size) * int(neighborhood_size)
    grass.mapcalc('{x} = float(1-( (sqrt(({a}*{d})^2 + ({b}*{d})^2 + ({c}*{d})^2) / {e})))'.format(x=OutRaster, a=xSumRaster, b=ySumRaster, c=zSumRaster, d=maxValue, e=maxValue))

    # Set the default color table
    grass.run_command("r.colors", flags='e', map=OutRaster, color="ryb")

    return 0
Example #11
def main():

    global rm_regions, rm_rasters, rm_vectors, tmpfolder

    # parameters
    s2names = options['s2names'].split(',')
    tmpdirectory = options['directory']

    test_nprocs_memory()

    grass.message(_("Downloading Sentinel scenes ..."))
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.download' module was not found, install it first:") +
                    "\n" +
                    "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.import', '--help'):
        grass.fatal(_("The 'i.sentinel.import' module was not found, install it first:") +
                    "\n" +
                    "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.parallel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.parallel.download' module was not found, install it first:") +
                    "\n" +
                    "g.extension i.sentinel")

    # create temporary directory to download data
    if tmpdirectory:
        if not os.path.isdir(tmpdirectory):
            try:
                os.makedirs(tmpdirectory)
            except:
                grass.fatal(_("Unable to create temp dir"))
    else:
        tmpdirectory = grass.tempdir()
        tmpfolder = tmpdirectory

    if os.path.isfile(s2names[0]):
        with open(s2names[0], 'r') as f:
            s2namesstr = f.read()
    else:
        s2namesstr = ','.join(s2names)

    grass.run_command(
        'i.sentinel.parallel.download',
        settings=options['settings'],
        scene_name=s2namesstr,
        nprocs=options['nprocs'],
        output=tmpdirectory,
        flags="fs",
        quiet=True)

    grass.message(_("Importing Sentinel scenes ..."))
    env = grass.gisenv()
    start_gisdbase = env['GISDBASE']
    start_location = env['LOCATION_NAME']
    start_cur_mapset = env['MAPSET']

    if len(s2namesstr.split(',')) < int(options['nprocs']):
        procs_import = len(s2namesstr.split(','))
    else:
        procs_import = int(options['nprocs'])
    # save current region
    pid = str(os.getpid())
    currentregion = 'tmp_region_' + pid
    grass.run_command('g.region', save=currentregion, flags='p')

    queue_import = ParallelModuleQueue(nprocs=procs_import)
    memory_per_proc = round(float(options['memory'])/procs_import)
    mapsetids = []
    importflag = 'r'
    if flags['i']:
        importflag += 'i'
    if flags['c']:
        importflag += 'c'
    json_standard_folder = os.path.join(env['GISDBASE'], env['LOCATION_NAME'], env['MAPSET'], 'cell_misc')
    if not os.path.isdir(json_standard_folder):
        os.makedirs(json_standard_folder)
    for idx, subfolder in enumerate(os.listdir(tmpdirectory)):
        if os.path.isdir(os.path.join(tmpdirectory, subfolder)):
            mapsetid = 'S2_import_%s' % (str(idx + 1))
            mapsetids.append(mapsetid)
            directory = os.path.join(tmpdirectory, subfolder)
            i_sentinel_import = Module(
                'i.sentinel.import.worker',
                input=directory,
                mapsetid=mapsetid,
                memory=memory_per_proc,
                pattern=options['pattern'],
                flags=importflag,
                region=currentregion,
                metadata=json_standard_folder,
                run_=False
            )
            queue_import.put(i_sentinel_import)
    queue_import.wait()
    grass.run_command('g.remove', type='region', name=currentregion, flags='f')

    # verify that switching the mapset worked
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    cur_mapset = env['MAPSET']
    if cur_mapset != start_cur_mapset:
        grass.fatal("New mapset is <%s>, but should be <%s>" % (cur_mapset, start_cur_mapset))

    # copy maps to current mapset
    maplist = []
    cloudlist = []
    for new_mapset in mapsetids:
        for vect in grass.parse_command('g.list', type='vector', mapset=new_mapset):
            cloudlist.append(vect)
            grass.run_command('g.copy', vector=vect + '@' + new_mapset + ',' + vect)
        for rast in grass.parse_command('g.list', type='raster', mapset=new_mapset):
            maplist.append(rast)
            grass.run_command('g.copy', raster=rast + '@' + new_mapset + ',' + rast)
            # set nulls
            grass.run_command('i.zero2null', map=rast, quiet=True)
        grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))

    # space time dataset
    grass.message(_("Creating STRDS of Sentinel scenes ..."))
    if options['strds_output']:
        strds = options['strds_output']
        grass.run_command(
            't.create', output=strds, title="Sentinel-2",
            desc="Sentinel-2", quiet=True)

        # create register file
        registerfile = grass.tempfile()
        file = open(registerfile, 'w')
        for imp_rast in list(set(maplist)):
            date_str1 = imp_rast.split('_')[1].split('T')[0]
            date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6], date_str1[6:])
            time_str = imp_rast.split('_')[1].split('T')[1]
            clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4], time_str[4:])
            file.write("%s|%s %s\n" % (imp_rast, date_str2, clock_str2))
        file.close()
        grass.run_command('t.register', input=strds, file=registerfile, quiet=True)
        # remove registerfile
        grass.try_remove(registerfile)

        if flags['c']:
            stvdsclouds = strds + '_clouds'
            grass.run_command(
                't.create', output=stvdsclouds, title="Sentinel-2 clouds",
                desc="Sentinel-2 clouds", quiet=True, type='stvds')
            registerfileclouds = grass.tempfile()
            fileclouds = open(registerfileclouds, 'w')
            for imp_clouds in cloudlist:
                date_str1 = imp_clouds.split('_')[1].split('T')[0]
                date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6], date_str1[6:])
                time_str = imp_clouds.split('_')[1].split('T')[1]
                clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4], time_str[4:])
                fileclouds.write("%s|%s %s\n" % (imp_clouds, date_str2, clock_str2))
            fileclouds.close()
            grass.run_command(
                't.register', type='vector', input=stvdsclouds, file=registerfileclouds, quiet=True)
            grass.message("<%s> is created" % (stvdsclouds))
            # remove registerfile
            grass.try_remove(registerfileclouds)

        # extract strds for each band
        bands = []
        pattern = options['pattern']
        if "(" in pattern:
            global beforebrackets, afterbrackets
            beforebrackets = re.findall(r"(.*?)\(", pattern)[0]
            inbrackets = re.findall(r"\((.*?)\)", pattern)[0]
            afterbrackets = re.findall(r"\)(.*)", pattern)[0]
            bands = ["%s%s%s" % (beforebrackets, x, afterbrackets) for x in inbrackets.split('|')]
        else:
            bands = pattern.split('|')

        for band in bands:
            if flags['i'] and ('20' in band or '60' in band):
                band = band.replace('20', '10').replace('60', '10')
            grass.run_command('t.rast.extract', input=strds, where="name like '%" + band + "%'", output="%s_%s" % (strds, band), quiet=True)
            grass.message("<%s_%s> is created" % (strds, band))
Example #12
def import2grass(files, args, datefmt="%Y%m", mapset_fmt="%Y_%m",
                 raster_fmt="%Y_%m", input_fmt="NETCDF:{input_file}",
                 **kwargs):
    # old variables
    nprocs = args.nprocs
    gisdbase = args.grassdata
    location = args.location
    mapset = args.mapset
    rename = args.rename
    convert = args.convert
    outs = {}
    env = os.environ.copy()
    mset_envs = {}
    mset_rasters = {}
    if nprocs > 1:
        queue = ParallelModuleQueue(nprocs=nprocs)

    for fdir, fil in files:
        base, date = extract_date(fil, datefmt=datefmt)
        if base not in outs:
            outs[base] = []
        outs[base].append(date)
        if mapset_fmt:
            mset_name = date.strftime(mapset_fmt)
            mset_path = os.path.join(gisdbase, location, mset_name)
            if not os.path.exists(mset_path):
                gs.grass_create(gs.GRASSBIN, mset_path, create_opts="")
                try:
                    os.makedirs(os.path.join(mset_path, '.tmp'))
                    os.makedirs(os.path.join(mset_path, '.tmp',
                                             socket.gethostname()))
                except OSError:
                    # ignore errors when creating the temporary directories
                    pass
            try:
                menv = mset_envs[mset_name]
                rasters = mset_rasters[mset_name]
            except KeyError:
                menv = gs.grass_init(gs.GISBASE, gisdbase, location, mset_name,
                                     env=env.copy())
                mset_envs[mset_name] = menv
                mset = Mapset(mset_name, location=location, gisdbase=gisdbase)
                rasters = set(mset.glist("raster"))
                mset_rasters[mset_name] = rasters
        else:
            menv = gs.grass_init(gs.GISBASE, gisdbase, location, mapset,
                                 env=env.copy())
            mset = Mapset(mapset, location=location, gisdbase=gisdbase)
            rasters = set(mset.glist("raster"))
        rast_name = "{ba}_{da}".format(ba=base, da=date.strftime(raster_fmt))
        if rast_name + '.1' not in rasters or rast_name + '.6' not in rasters:
            ifile = os.path.join(fdir, fil)
            mod = Module("r.in.gdal", quiet=True,
                         input=input_fmt.format(input_file=ifile),
                         output=rast_name, run_=False, **kwargs)
            if nprocs > 1:
                mod.env_ = menv
                #time.sleep(0.2) # sleep, otherwise there is a problem in creating
                queue.put(mod)
            else:
                mod.run()
                if convert:
                    convert_maps(base, date, log=args.log)
                if rename:
                    rename_maps(base, date, log=args.log)
    if nprocs > 1:
        queue.wait()
    return outs
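
import2grass (and the reproject function in a later example) rely on an extract_date helper that is not shown. A minimal sketch of what it might look like for files named like "pr_201805.nc" (the name layout is an assumption; the reproject variant additionally parses and returns an elevation field):

import datetime
import os


def extract_date(fname, datefmt="%Y%m"):
    # assumed layout: <base>_<datestring><ext>, e.g. "pr_201805.nc"
    base, datestr = os.path.splitext(fname)[0].rsplit("_", 1)
    return base, datetime.datetime.strptime(datestr, datefmt)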
Example #13
    map_file = map_timestamps(directory, bands)
    Module('t.register', input=output, file=map_file, quiet=True, overwrite=True)

def check_strds(name):
    strds = grass.read_command('t.list', quiet=True).splitlines()
    if name + '@' + mapset not in strds:
        return
    
    if os.environ.get('GRASS_OVERWRITE', '0') == '1':
        grass.warning("Space time raster dataset <{}> is already exists "
                      "and will be overwritten.".format(name))
        Module('t.remove', inputs=name, quiet=True)
    else:
        grass.fatal("Space time raster dataset <{}> is already in the database. "
                    "Use the overwrite flag.".format(name))
    
def main():
    check_strds(opt['output'])
    maps = filter_data(int(opt['cloud_cover']))
    bands = clip_mask_data(maps)
    create_strds(opt['input'], opt['output'], bands)
    
if __name__ == "__main__":
    opt, flgs = grass.parser()
    mapset=grass.gisenv()['MAPSET']

    # queue for parallel jobs
    queue = ParallelModuleQueue(8)

    sys.exit(main())
Example #14
def reproject(igisdbase,
              ilocation,
              olocation,
              mset_pat,
              rast_pat,
              datefmt="%Y%m%d_%H",
              mapset_fmt="r%Y_%m",
              raster_fmt="T{elev:03d}m_%Y%m%d_%H",
              nprocs=4,
              ogisdbase=None,
              **kwargs):
    env = os.environ.copy()
    ogisdbase = igisdbase if ogisdbase is None else ogisdbase
    mset_envs = {}
    mset_rasters = {}
    queue = ParallelModuleQueue(nprocs=nprocs)
    iloc = Location(location=ilocation, gisdbase=igisdbase)
    # oloc = Location(location=olocation, gisdbase=ogisdbase)
    for imset_name in iloc.mapsets(pattern=mset_pat):
        for rname in iloc[imset_name].glist("raster", pattern=rast_pat):
            base, date, elev = extract_date(rname, datefmt=datefmt)
            rast_name = date.strftime(raster_fmt.format(elev=elev))
            mset_name = date.strftime(mapset_fmt)
            mset_path = os.path.join(ogisdbase, olocation, mset_name)
            if not os.path.exists(mset_path):
                gs.grass_create(gs.GRASSBIN, mset_path, create_opts="")
                try:
                    os.makedirs(os.path.join(mset_path, '.tmp'))
                    os.makedirs(
                        os.path.join(mset_path, '.tmp', socket.gethostname()))
                except OSError:
                    # ignore errors when creating the temporary directories
                    pass
            try:
                menv = mset_envs[mset_name]
                rasters = mset_rasters[mset_name]
            except KeyError:
                menv = gs.grass_init(gs.GISBASE,
                                     ogisdbase,
                                     olocation,
                                     mset_name,
                                     env=env.copy())
                mset_envs[mset_name] = menv
                mset = Mapset(mset_name,
                              location=olocation,
                              gisdbase=ogisdbase)
                rasters = set(mset.glist("raster"))
                mset_rasters[mset_name] = rasters
                # set region for the mapset
                sregion = read_command("r.proj",
                                       location=ilocation,
                                       dbase=igisdbase,
                                       mapset=imset_name,
                                       input=rname,
                                       output=rast_name,
                                       flags="g",
                                       env=menv)
                kregion = dict([tuple(s.split('=')) for s in sregion.split()])
                run_command("g.region",
                            save=mset_name,
                            env=menv,
                            overwrite=True,
                            **kregion)
                menv["WIND_OVERRIDE"] = mset_name

            if rast_name not in rasters:
                mod = Module("r.proj",
                             location=ilocation,
                             dbase=igisdbase,
                             mapset=imset_name,
                             input=rname,
                             output=rast_name,
                             run_=False,
                             **kwargs)
                mod.env_ = menv
                print(rast_name)
                #time.sleep(0.2) # sleep, otherwise there is a problem in creating
                queue.put(mod)
    queue.wait()
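
A hypothetical call, assuming two locations in the same GISDBASE and passing r.proj's method option through **kwargs:

reproject('/grassdata', 'utm32n', 'laea',            # hypothetical paths/locations
          mset_pat='r2018_*', rast_pat='T*',
          nprocs=4, method='bilinear')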
Example #15
def main():

    global rm_regions, rm_rasters, rm_vectors, tmpfolder

    # parameters
    if options['s2names']:
        s2names = options['s2names'].split(',')
        if os.path.isfile(s2names[0]):
            with open(s2names[0], 'r') as f:
                s2namesstr = f.read()
        else:
            s2namesstr = ','.join(s2names)
    tmpdirectory = options['directory']

    test_nprocs_memory()

    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(
            _("The 'i.sentinel.download' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.import', '--help'):
        grass.fatal(
            _("The 'i.sentinel.import' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.parallel.download', '--help'):
        grass.fatal(
            _("The 'i.sentinel.parallel.download' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.zero2null', '--help'):
        grass.fatal(
            _("The 'i.zero2null' module was not found, install it first:") +
            "\n" + "g.extension i.zero2null")

    # create temporary directory to download data
    if tmpdirectory:
        if not os.path.isdir(tmpdirectory):
            try:
                os.makedirs(tmpdirectory)
            except:
                grass.fatal(_("Unable to create temp dir"))

    else:
        tmpdirectory = grass.tempdir()
        tmpfolder = tmpdirectory

    # make distinct download and sen2cor directories
    try:
        download_dir = os.path.join(tmpdirectory,
                                    'download_{}'.format(os.getpid()))
        os.makedirs(download_dir)
    except Exception as e:
        grass.fatal(_('Unable to create temp dir {}').format(download_dir))

    download_args = {
        'settings': options['settings'],
        'nprocs': options['nprocs'],
        'output': download_dir,
        'datasource': options['datasource'],
        'flags': 'f'
    }
    if options['limit']:
        download_args['limit'] = options['limit']
    if options['s2names']:
        download_args['flags'] += 's'
        download_args['scene_name'] = s2namesstr.strip()
        if options['datasource'] == 'USGS_EE':
            if flags['e']:
                download_args['flags'] += 'e'
            download_args['producttype'] = 'S2MSI1C'
    else:
        download_args['clouds'] = options['clouds']
        download_args['start'] = options['start']
        download_args['end'] = options['end']
        download_args['producttype'] = options['producttype']

    grass.run_command('i.sentinel.parallel.download', **download_args)
    number_of_scenes = len(os.listdir(download_dir))
    nprocs_final = min(number_of_scenes, int(options['nprocs']))

    # run atmospheric correction
    if flags['a']:
        sen2cor_folder = os.path.join(tmpdirectory,
                                      'sen2cor_{}'.format(os.getpid()))
        try:
            os.makedirs(sen2cor_folder)
        except Exception as e:
            grass.fatal(
                _("Unable to create temporary sen2cor folder {}").format(
                    sen2cor_folder))
        grass.message(
            _('Starting atmospheric correction with sen2cor using {} '
              'parallel processes...').format(nprocs_final))
        queue_sen2cor = ParallelModuleQueue(nprocs=nprocs_final)
        for idx, subfolder in enumerate(os.listdir(download_dir)):
            folderpath = os.path.join(download_dir, subfolder)
            for file in os.listdir(folderpath):
                if file.endswith('.SAFE'):
                    filepath = os.path.join(folderpath, file)
            output_dir = os.path.join(sen2cor_folder,
                                      'sen2cor_result_{}'.format(idx))
            sen2cor_module = Module(
                'i.sentinel-2.sen2cor',
                input_file=filepath,
                output_dir=output_dir,
                sen2cor_path=options['sen2cor_path'],
                nprocs=1,
                run_=False
                # all remaining sen2cor parameters can be left as default
            )
            queue_sen2cor.put(sen2cor_module)
        queue_sen2cor.wait()
        download_dir = sen2cor_folder

    grass.message(_("Importing Sentinel scenes ..."))
    env = grass.gisenv()
    start_gisdbase = env['GISDBASE']
    start_location = env['LOCATION_NAME']
    start_cur_mapset = env['MAPSET']
    # save current region
    pid = str(os.getpid())
    currentregion = 'tmp_region_' + pid
    grass.run_command('g.region', save=currentregion, flags='p')

    queue_import = ParallelModuleQueue(nprocs=nprocs_final)
    memory_per_proc = round(float(options['memory']) / nprocs_final)
    mapsetids = []
    importflag = 'r'
    if flags['i']:
        importflag += 'i'
    if flags['c']:
        importflag += 'c'
    json_standard_folder = os.path.join(env['GISDBASE'], env['LOCATION_NAME'],
                                        env['MAPSET'], 'cell_misc')
    if not os.path.isdir(json_standard_folder):
        os.makedirs(json_standard_folder)
    subfolders = []
    for idx, subfolder in enumerate(os.listdir(download_dir)):
        if os.path.isdir(os.path.join(download_dir, subfolder)):
            subfolders.append(subfolder)
            mapsetid = 'S2_import_%s' % (str(idx + 1))
            mapsetids.append(mapsetid)
            directory = os.path.join(download_dir, subfolder)
            i_sentinel_import = Module('i.sentinel.import.worker',
                                       input=directory,
                                       mapsetid=mapsetid,
                                       memory=memory_per_proc,
                                       pattern=options['pattern'],
                                       flags=importflag,
                                       region=currentregion,
                                       metadata=json_standard_folder,
                                       run_=False)
            queue_import.put(i_sentinel_import)
    queue_import.wait()
    grass.run_command('g.remove', type='region', name=currentregion, flags='f')
    # verify that switching the mapset worked
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    cur_mapset = env['MAPSET']
    if cur_mapset != start_cur_mapset:
        grass.fatal("New mapset is <%s>, but should be <%s>" %
                    (cur_mapset, start_cur_mapset))
    # copy maps to current mapset
    maplist = []
    cloudlist = []
    for new_mapset in mapsetids:
        for vect in grass.parse_command('g.list',
                                        type='vector',
                                        mapset=new_mapset):
            cloudlist.append(vect)
            grass.run_command('g.copy',
                              vector=vect + '@' + new_mapset + ',' + vect)
        for rast in grass.parse_command('g.list',
                                        type='raster',
                                        mapset=new_mapset):
            maplist.append(rast)
            grass.run_command('g.copy',
                              raster=rast + '@' + new_mapset + ',' + rast)
            # set nulls
            grass.run_command('i.zero2null', map=rast, quiet=True)
        grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))
    # space time dataset
    grass.message(_("Creating STRDS of Sentinel scenes ..."))
    if options['strds_output']:
        strds = options['strds_output']
        grass.run_command('t.create',
                          output=strds,
                          title="Sentinel-2",
                          desc="Sentinel-2",
                          quiet=True)

        # create register file
        registerfile = grass.tempfile()
        file = open(registerfile, 'w')
        for imp_rast in list(set(maplist)):
            date_str1 = imp_rast.split('_')[1].split('T')[0]
            date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6],
                                      date_str1[6:])
            time_str = imp_rast.split('_')[1].split('T')[1]
            clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4],
                                       time_str[4:])
            file.write("%s|%s %s\n" % (imp_rast, date_str2, clock_str2))
        file.close()
        grass.run_command('t.register',
                          input=strds,
                          file=registerfile,
                          quiet=True)
        # remove registerfile
        grass.try_remove(registerfile)

        if flags['c']:
            stvdsclouds = strds + '_clouds'
            grass.run_command('t.create',
                              output=stvdsclouds,
                              title="Sentinel-2 clouds",
                              desc="Sentinel-2 clouds",
                              quiet=True,
                              type='stvds')
            registerfileclouds = grass.tempfile()
            fileclouds = open(registerfileclouds, 'w')
            for imp_clouds in cloudlist:
                date_str1 = imp_clouds.split('_')[1].split('T')[0]
                date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6],
                                          date_str1[6:])
                time_str = imp_clouds.split('_')[1].split('T')[1]
                clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4],
                                           time_str[4:])
                fileclouds.write("%s|%s %s\n" %
                                 (imp_clouds, date_str2, clock_str2))
            fileclouds.close()
            grass.run_command('t.register',
                              type='vector',
                              input=stvdsclouds,
                              file=registerfileclouds,
                              quiet=True)
            grass.message("<%s> is created" % (stvdsclouds))
            # remove registerfile
            grass.try_remove(registerfileclouds)

        # extract strds for each band
        bands = []
        pattern = options['pattern']
        if "(" in pattern:
            global beforebrackets, afterbrackets
            beforebrackets = re.findall(r"(.*?)\(", pattern)[0]
            inbrackets = re.findall(r"\((.*?)\)", pattern)[0]
            afterbrackets = re.findall(r"\)(.*)", pattern)[0]
            bands = [
                "%s%s%s" % (beforebrackets, x, afterbrackets)
                for x in inbrackets.split('|')
            ]
        else:
            bands = pattern.split('|')

        for band in bands:
            if flags['i'] and ('20' in band or '60' in band):
                # with -i the 20 m/60 m bands were resampled,
                # so the imported maps carry the 10 m band name
                band = band.replace('20', '10').replace('60', '10')
            grass.run_command('t.rast.extract',
                              input=strds,
                              where="name like '%" + band + "%'",
                              output="%s_%s" % (strds, band),
                              quiet=True)
            grass.message("<%s_%s> is created" % (strds, band))
def main():

    global rm_regions, rm_rasters, rm_vectors, tmpfolder

    # parameters
    strds = options['input']
    strdsout = options['output_clouds']
    threshold = float(options['threshold'])

    test_nprocs()

    # test if necessary GRASS GIS addons are installed
    if not grass.find_program('i.sentinel.mask', '--help'):
        grass.fatal(
            _("The 'i.sentinel.mask' module was not found, install it first:")
            + "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.mask.worker', '--help'):
        grass.fatal(
            _("The 'i.sentinel.mask.worker' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel.mask.worker url=...")

    strdsrasters = [
        x.split('|')[0]
        for x in grass.parse_command('t.rast.list', input=strds, flags='u')
    ]
    times = [
        x.split('|')[2]
        for x in grass.parse_command('t.rast.list', input=strds, flags='u')
    ]
    s2_scenes = dict()
    for strdsrast, time in zip(strdsrasters, times):
        # check if strdsrast has data, skip otherwise
        stats = grass.parse_command("r.info", map=strdsrast, flags="r")
        if stats["min"] == "NULL" and stats["max"] == "NULL":
            grass.warning(
                _("Raster {} only consists of NULL() in current "
                  "region. Cloud/shadow detection "
                  "is skipped.").format(strdsrast))
            continue
        parts = strdsrast.split('_')
        name = "%s_%s" % (parts[0], parts[1])
        band = parts[2]
        if name not in s2_scenes:
            s2_scene = {
                'B02': None,
                'B03': None,
                'B04': None,
                'B08': None,
                'B8A': None,
                'B11': None,
                'B12': None,
                'date': None
            }
            s2_scene['clouds'] = "%s_clouds" % name
            if options['output_shadows']:
                s2_scene['shadows'] = "%s_shadows" % name
            if threshold > 0 or options['output_shadows']:
                if options['metadata'] == 'default':
                    env = grass.gisenv()
                    json_standard_folder = os.path.join(
                        env['GISDBASE'], env['LOCATION_NAME'], env['MAPSET'],
                        'cell_misc')
                    s2_scene['metadata'] = os.path.join(
                        json_standard_folder, strdsrast, "description.json")
                elif options['metadata']:
                    json_standard_folder = options['metadata']
                    s2_scene['metadata'] = os.path.join(
                        json_standard_folder, strdsrast, "description.json")
            s2_scenes[name] = s2_scene
        s2_scenes[name][band] = strdsrast
        if not s2_scenes[name]['date']:
            if '.' in time:
                dateformat = '%Y-%m-%d %H:%M:%S.%f'
            else:
                dateformat = '%Y-%m-%d %H:%M:%S'
            s2_scenes[name]['date'] = datetime.strptime(time, dateformat)

    # check if all input bands are in strds
    for key in s2_scenes:
        if any([val is None for key2, val in s2_scenes[key].items()]):
            grass.fatal(_("Not all needed bands are given"))

    grass.message(_("Find clouds (and shadows) in Sentinel scenes ..."))
    env = grass.gisenv()
    start_gisdbase = env['GISDBASE']
    start_location = env['LOCATION_NAME']
    start_cur_mapset = env['MAPSET']

    queue = ParallelModuleQueue(nprocs=options['nprocs'])
    bands = ['B02', 'B03', 'B04', 'B08', 'B8A', 'B11', 'B12']
    number_of_scenes = len(s2_scenes)
    number = 0
    for s2_scene_name in s2_scenes:
        s2_scene = s2_scenes[s2_scene_name]
        number += 1
        grass.message(
            _("Processing %d of %d scenes") % (number, number_of_scenes))
        if threshold > 0:
            with open(s2_scene['metadata'], 'r') as f:
                data = json.load(f)
            if threshold > float(data['CLOUDY_PIXEL_PERCENTAGE']):
                computingClouds = False
            else:
                computingClouds = True
        else:
            computingClouds = True
        for band in bands:
            rm_rasters.append("%s_double" % s2_scene[band])
        if computingClouds:
            kwargs = dict()
            if options['output_shadows']:
                kwargs['shadow_raster'] = s2_scene['shadows']
                kwargs['metadata'] = s2_scene['metadata']
                kwargs['shadow_threshold'] = 1000
                flags = 's'
            else:
                flags = 'sc'
            newmapset = s2_scene['clouds']
            # grass.run_command(
            i_sentinel_mask = Module(
                'i.sentinel.mask.worker',
                blue="%s@%s" % (s2_scene['B02'], start_cur_mapset),
                green="%s@%s" % (s2_scene['B03'], start_cur_mapset),
                red="%s@%s" % (s2_scene['B04'], start_cur_mapset),
                nir="%s@%s" % (s2_scene['B08'], start_cur_mapset),
                nir8a="%s@%s" % (s2_scene['B8A'], start_cur_mapset),
                swir11="%s@%s" % (s2_scene['B11'], start_cur_mapset),
                swir12="%s@%s" % (s2_scene['B12'], start_cur_mapset),
                flags=flags,
                cloud_raster=s2_scene['clouds'],
                newmapset=newmapset,
                quiet=True,
                run_=False,
                **kwargs)
            queue.put(i_sentinel_mask)
    queue.wait()

    # verify that switching the mapset worked
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    cur_mapset = env['MAPSET']
    if cur_mapset != start_cur_mapset:
        grass.fatal("New mapset is <%s>, but should be <%s>" %
                    (cur_mapset, start_cur_mapset))

    # copy maps to current mapset
    for s2_scene_name in s2_scenes:
        s2_scene = s2_scenes[s2_scene_name]
        newmapset = s2_scene['clouds']
        if grass.find_file(s2_scene['clouds'],
                           element='raster',
                           mapset=newmapset)['file']:
            if options['min_size_clouds']:
                try:
                    grass.run_command('r.reclass.area',
                                      input="%s@%s" %
                                      (s2_scene['clouds'], newmapset),
                                      output=s2_scene['clouds'],
                                      value=options['min_size_clouds'],
                                      mode='greater',
                                      quiet=True)
                except Exception as e:
                    # todo: remove workaround once r.reclass.area is updated
                    grass.message(
                        _('No clouds larger than %s ha detected. Image is '
                          'considered cloud-free.') % options['min_size_clouds'])
                    exp_null = '%s = null()' % s2_scene['clouds']
                    grass.run_command('r.mapcalc',
                                      expression=exp_null,
                                      quiet=True)
            else:
                grass.run_command(
                    'g.copy',
                    raster="%s@%s,%s" %
                    (s2_scene['clouds'], newmapset, s2_scene['clouds']))
        else:
            grass.run_command('r.mapcalc',
                              expression="%s = null()" % s2_scene['clouds'])
        if options['output_shadows']:
            if grass.find_file(s2_scene['shadows'],
                               element='raster',
                               mapset=newmapset)['file']:
                if options['min_size_shadows']:
                    try:
                        grass.run_command('r.reclass.area',
                                          input="%s@%s" %
                                          (s2_scene['shadows'], newmapset),
                                          output=s2_scene['shadows'],
                                          value=options['min_size_shadows'],
                                          mode='greater',
                                          quiet=True)
                    except Exception as e:
                        # todo: remove workaround once r.reclass.area is updated
                        grass.message(
                            _('No shadows larger than %s ha detected. Image is '
                              'considered shadow-free.') % options['min_size_shadows'])
                        exp_null = '%s = null()' % s2_scene['shadows']
                        grass.run_command('r.mapcalc',
                                          expression=exp_null,
                                          quiet=True)
                else:
                    grass.run_command(
                        'g.copy',
                        raster="%s@%s,%s" %
                        (s2_scene['shadows'], newmapset, s2_scene['shadows']))
            else:
                grass.run_command('r.mapcalc',
                                  expression="%s = null()" %
                                  s2_scene['shadows'])
        grass.utils.try_rmdir(os.path.join(gisdbase, location, newmapset))

    # patch together clouds (and shadows) if they have the same date
    all_dates = []
    dates_scenes = []
    for s2_scene in s2_scenes:
        all_dates.append(s2_scenes[s2_scene]['date'])
    unique_dates = list(set(all_dates))
    for date in unique_dates:
        tempdict = {}
        tempdict['date'] = date
        scenelist = []
        cloudlist = []
        shadowlist = []
        for s2_scene in s2_scenes:
            if s2_scenes[s2_scene]['date'] == date:
                scenelist.append(s2_scene)
                cloudlist.append(s2_scenes[s2_scene]['clouds'])
                if options['output_shadows']:
                    shadowlist.append(s2_scenes[s2_scene]['shadows'])
        tempdict['scenes'] = scenelist
        tempdict['clouds'] = cloudlist
        tempdict['shadows'] = shadowlist
        dates_scenes.append(tempdict)

    for date_scenes in dates_scenes:
        if len(date_scenes['scenes']) > 1:
            cloud_patch = 'clouds_patched_{}'.format(
                date_scenes['date'].strftime('%Y%m%d'))
            rm_rasters.extend(date_scenes['clouds'])
            grass.run_command('r.patch',
                              input=date_scenes['clouds'],
                              output=cloud_patch,
                              quiet=True)
            if options['output_shadows']:
                shadow_patch = 'shadows_patched_{}'.format(
                    date_scenes['date'].strftime('%Y%m%d'))
                rm_rasters.extend(date_scenes['shadows'])
                grass.run_command('r.patch',
                                  input=date_scenes['shadows'],
                                  output=shadow_patch,
                                  quiet=True)
            for scene in date_scenes['scenes']:
                s2_scenes[scene]['clouds'] = cloud_patch
                if options['output_shadows']:
                    s2_scenes[scene]['shadows'] = shadow_patch

    grass.message(_("Create space time raster data set of clouds ..."))
    grass.run_command('t.create',
                      output=strdsout,
                      title="Sentinel-2 cloud mask",
                      desc="Sentinel-2 cloud mask",
                      quiet=True)
    # create register file
    registerfile = grass.tempfile()
    file = open(registerfile, 'w')
    clouds_registered = []
    for s2_scene_name in s2_scenes:
        s2_scene = s2_scenes[s2_scene_name]
        clouds = s2_scene['clouds']
        if clouds not in clouds_registered:
            file.write(
                "%s|%s\n" %
                (clouds, s2_scene['date'].strftime("%Y-%m-%d %H:%M:%S")))
            clouds_registered.append(clouds)
    file.close()
    grass.run_command('t.register',
                      input=strdsout,
                      file=registerfile,
                      quiet=True)
    # remove registerfile
    grass.try_remove(registerfile)

    if options['output_shadows']:
        grass.message(_("Create space time raster data set of shadows ..."))
        grass.run_command('t.create',
                          output=options['output_shadows'],
                          title="Sentinel-2 shadow mask",
                          desc="Sentinel-2 shadow mask",
                          quiet=True)
        # create register file
        registerfile = grass.tempfile()
        file = open(registerfile, 'w')
        shadows_registered = []
        for s2_scene_name in s2_scenes:
            s2_scene = s2_scenes[s2_scene_name]
            shadows = s2_scene['shadows']
            if shadows not in shadows_registered:
                file.write(
                    "%s|%s\n" %
                    (shadows, s2_scene['date'].strftime("%Y-%m-%d %H:%M:%S")))
                shadows_registered.append(shadows)
        file.close()
        grass.run_command('t.register',
                          input=options['output_shadows'],
                          file=registerfile,
                          quiet=True)
        # remove registerfile
        grass.try_remove(registerfile)
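test_nprocs() is called above but defined outside this excerpt. A minimal sketch, assuming it merely validates the nprocs option against the available CPUs (options and _ are module-level, as in the surrounding examples):

import multiprocessing as mp

def test_nprocs():
    # Abort early if more worker processes are requested than CPUs exist.
    nprocs = int(options['nprocs'])
    if nprocs > mp.cpu_count():
        grass.fatal(_("nprocs=%d requested, but only %d CPU(s) available") %
                    (nprocs, mp.cpu_count()))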
Exemple #17
def main():

    settings = options['settings']
    scene_names = options['scene_name'].split(',')
    output = options['output']
    nprocs = int(options['nprocs'])
    clouds = int(options['clouds'])
    producttype = options['producttype']
    start = options['start']
    end = options['end']
    datasource = options['datasource']
    use_scenenames = flags['s']
    ind_folder = flags['f']

    if datasource == "USGS_EE" and producttype != "S2MSI1C":
        grass.fatal(
            _('Download from USGS Earth Explorer only supports '
              'Sentinel-2 Level 1C data (S2MSI1C)'))

    # check if we have the i.sentinel.download + i.sentinel.import addons
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(
            _("The 'i.sentinel.download' module was not found, "
              "install it first: \n g.extension i.sentinel"))

    # Test if all required data are there
    if not os.path.isfile(settings):
        grass.fatal(_("Settings file <{}> not found").format(settings))

    # set some common environmental variables, like:
    os.environ.update(
        dict(GRASS_COMPRESS_NULLS='1',
             GRASS_COMPRESSOR='ZSTD',
             GRASS_MESSAGE_FORMAT='plain'))

    # test nprocs setting
    if nprocs > mp.cpu_count():
        grass.warning(
            _("Using {} parallel processes but only {} CPUs available. "
              "Setting nprocs to {}").format(nprocs, mp.cpu_count(),
                                             mp.cpu_count() - 1))
        nprocs = mp.cpu_count() - 1

    # sentinelsat allows only three parallel downloads
    elif nprocs > 2 and options['datasource'] == 'ESA_COAH':
        grass.message(
            _("Maximum number of parallel processes for Downloading"
              " fixed to 2 due to sentinelsat API restrictions"))
        nprocs = 2

    # usgs allows maximum 10 parallel downloads
    elif nprocs > 10 and options['datasource'] == 'USGS_EE':
        grass.message(
            _("Maximum number of parallel processes for Downloading"
              " fixed to 10 due to Earth Explorer restrictions"))
        nprocs = 10

    if use_scenenames:
        scenenames = scene_names
        # check if the filename is valid
        # usgs scenename format will be checked in i.sentinel.download
        if datasource == "ESA_COAH":
            for scene in scenenames:
                if len(scene) < 10 or not scene.startswith('S2'):
                    grass.fatal(
                        _("Please provide scenenames in the format"
                          " S2X_LLLLLL_YYYYMMDDTHHMMSS_"
                          "NYYYY_RZZZ_TUUUUU_YYYYMMDDTHHMMSS.SAFE"))
    else:
        # get a list of scenenames to download
        download_args = {
            'settings': settings,
            'producttype': producttype,
            'start': start,
            'end': end,
            'clouds': clouds,
            'datasource': datasource,
            'flags': 'l'
        }
        if options['limit']:
            download_args['limit'] = options['limit']
        i_sentinel_download_string = grass.parse_command(
            'i.sentinel.download', **download_args)
        i_sentinel_keys = i_sentinel_download_string.keys()
        scenenames = [item.split(' ')[1] for item in i_sentinel_keys]
    # parallelize download
    grass.message(_("Downloading Sentinel-2 data..."))

    # adapt nprocs to number of scenes
    nprocs_final = min(len(scenenames), nprocs)
    queue_download = ParallelModuleQueue(nprocs=nprocs_final)

    for idx, scenename in enumerate(scenenames):
        producttype, start_date, end_date, query_string = scenename_split(
            scenename, datasource, flags['e'])
        # output into separate folders, easier to import in a parallel way:
        if ind_folder:
            outpath = os.path.join(output, 'dl_s2_%s' % str(idx + 1))
        else:
            outpath = output
        i_sentinel_download = Module('i.sentinel.download',
                                     settings=settings,
                                     start=start_date,
                                     end=end_date,
                                     producttype=producttype,
                                     query=query_string,
                                     output=outpath,
                                     datasource=datasource,
                                     run_=False)
        queue_download.put(i_sentinel_download)
    queue_download.wait()
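scenename_split() belongs to the addon's library and is not shown here. A speculative sketch of the ESA_COAH case: derive the product type, a one-day sensing window, and an identifier query from a name like S2A_MSIL1C_20180822T155901_...SAFE (the return values and query syntax are assumptions, not the addon's actual code):

from datetime import datetime, timedelta

def scenename_split(scenename):
    # Hypothetical parser for ESA .SAFE scene names.
    parts = scenename.split('_')
    producttype = 'S2' + parts[1].replace('L', '', 1)   # MSIL1C -> S2MSI1C
    sensing = datetime.strptime(parts[2], '%Y%m%dT%H%M%S')
    start_date = sensing.strftime('%Y-%m-%d')
    end_date = (sensing + timedelta(days=1)).strftime('%Y-%m-%d')
    # narrow the download query to this exact scene
    query_string = 'identifier=%s' % scenename.replace('.SAFE', '')
    return producttype, start_date, end_date, query_string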
Exemple #18
def main():

    settings = options['settings']
    scene_names = options['scene_name'].split(',')
    output = options['output']
    nprocs = int(options['nprocs'])
    clouds = int(options['clouds'])
    producttype = options['producttype']
    start = options['start']
    end = options['end']
    use_scenenames = flags['s']
    ind_folder = flags['f']

    ### check if we have the i.sentinel.download + i.sentinel.import addons
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(
            _("The 'i.sentinel.download' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel")

    ### Test if all required data are there
    if not os.path.isfile(settings):
        grass.fatal(_("Settings file <%s> not found" % (settings)))

    ### set some common environmental variables, like:
    os.environ.update(
        dict(GRASS_COMPRESS_NULLS='1',
             GRASS_COMPRESSOR='ZSTD',
             GRASS_MESSAGE_FORMAT='plain'))

    ### test nprocs Settings
    if nprocs > mp.cpu_count():
        grass.fatal("Using %d parallel processes but only %d CPUs available." %
                    (nprocs, mp.cpu_count()))

    ### sentinelsat allows only three parallel downloads
    elif nprocs > 2:
        grass.message("Maximum number of parallel processes for Downloading" +
                      " fixed to 2 due to sentinelsat API restrictions")
        nprocs = 2

    if use_scenenames:
        scenenames = scene_names
        ### check if the filename is valid
        ### TODO: refine check, it's currently a very lazy check
        if len(scenenames[0]) < 10:
            grass.fatal(
                "No scene names indicated. Please provide scenenames in "
                "the format S2A_MSIL1C_20180822T155901_N0206_R097_"
                "T17SPV_20180822T212023.SAFE")
    else:
        ### get a list of scenenames to download
        i_sentinel_download_string = grass.parse_command(
            'i.sentinel.download',
            settings=settings,
            producttype=producttype,
            start=start,
            end=end,
            clouds=clouds,
            flags='l')
        i_sentinel_keys = i_sentinel_download_string.keys()
        scenenames = [item.split(' ')[1] for item in i_sentinel_keys]

    ### parallelize download
    grass.message(_("Downloading Sentinel-2 data..."))

    ### adapt nprocs to number of scenes
    if len(scenenames) == 1:
        nprocs = 1

    queue_download = ParallelModuleQueue(nprocs=nprocs)

    for idx, scenename in enumerate(scenenames):
        producttype, start_date, end_date, query_string = scenename_split(
            scenename)
        ### output into separate folders, easier to import in a parallel way:
        if ind_folder:
            outpath = os.path.join(output, 'dl_s2_%s' % str(idx + 1))
        else:
            outpath = output
        i_sentinel_download = Module('i.sentinel.download',
                                     settings=settings,
                                     start=start_date,
                                     end=end_date,
                                     producttype=producttype,
                                     query=query_string,
                                     output=outpath,
                                     run_=False)
        queue_download.put(i_sentinel_download)
    queue_download.wait()
Exemple #19
def main():
    # user specified variables
    dem = options["elevation"]
    slope = options["slope"]
    aspect = options["aspect"]
    neighborhood_size = options["size"]
    output = options["output"]
    nprocs = int(options["nprocs"])
    exponent = float(options["exponent"])

    # check for valid neighborhood sizes
    neighborhood_size = neighborhood_size.split(",")
    neighborhood_size = [int(i) for i in neighborhood_size]

    if any([True for i in neighborhood_size if i % 2 == 0]):
        gs.fatal(
            "Invalid size - neighborhood sizes have to consist of odd numbers")

    if min(neighborhood_size) == 1:
        gs.fatal("Neighborhood sizes have to be > 1")

    # determine nprocs
    if nprocs < 0:
        n_cores = mp.cpu_count()
        nprocs = n_cores - (nprocs + 1)

    # temporary raster map names for slope, aspect, x, y, z components
    if slope == "":
        slope_raster = create_tempname("tmpSlope_")
    else:
        slope_raster = slope

    if aspect == "":
        aspect_raster = create_tempname("tmpAspect_")
    else:
        aspect_raster = aspect

    z_raster = create_tempname("tmpzRaster_")
    x_raster = create_tempname("tmpxRaster_")
    y_raster = create_tempname("tmpyRaster_")

    # create slope and aspect rasters
    if slope == "" or aspect == "":
        gs.message("Calculating slope and aspect...")
        gr.slope_aspect(
            elevation=dem,
            slope=slope_raster,
            aspect=aspect_raster,
            format="degrees",
            precision="FCELL",
            zscale=1.0,
            min_slope=0.0,
            quiet=True,
        )

    # calculate x y and z rasters
    # note - GRASS sin/cos functions differ from ArcGIS which expects input grid in radians
    # whereas GRASS functions expect degrees
    # no need to convert slope and aspect to radians as in the original ArcGIS script
    x_expr = "{x} = float( sin({a}) * sin({b}) )".format(x=x_raster,
                                                         a=aspect_raster,
                                                         b=slope_raster)

    y_expr = "{y} = float( cos({a}) * sin({b}) )".format(y=y_raster,
                                                         a=aspect_raster,
                                                         b=slope_raster)

    z_expr = "{z} = float( cos({a}) )".format(z=z_raster, a=slope_raster)

    # calculate x, y, z components (parallel)
    gs.message("Calculating x, y, and z rasters...")

    mapcalc = Module("r.mapcalc", run_=False)
    queue = ParallelModuleQueue(nprocs=nprocs)

    mapcalc1 = copy.deepcopy(mapcalc)
    m = mapcalc1(expression=x_expr)
    queue.put(m)

    mapcalc2 = copy.deepcopy(mapcalc)
    m = mapcalc2(expression=y_expr)
    queue.put(m)

    mapcalc3 = copy.deepcopy(mapcalc)
    m = mapcalc3(expression=z_expr)
    queue.put(m)

    queue.wait()

    # calculate x, y, z neighborhood sums (parallel)
    gs.message(
        "Calculating sums of x, y, and z rasters in selected neighborhoods...")

    x_sum_list = []
    y_sum_list = []
    z_sum_list = []

    neighbors = Module("r.neighbors", overwrite=True, run_=False)
    queue = ParallelModuleQueue(nprocs=nprocs)

    for size in neighborhood_size:
        # create temporary raster names for neighborhood x, y, z sums
        x_sum_raster = create_tempname("tmpxSumRaster_")
        x_sum_list.append(x_sum_raster)

        y_sum_raster = create_tempname("tmpySumRaster_")
        y_sum_list.append(y_sum_raster)

        z_sum_raster = create_tempname("tmpzSumRaster_")
        z_sum_list.append(z_sum_raster)

        # create weights
        mat = idw_weights(size, exponent)

        # queue jobs for x, y, z neighborhood sums
        neighbors_xsum = copy.deepcopy(neighbors)
        n = neighbors_xsum(
            input=x_raster,
            output=x_sum_raster,
            method="average",
            size=size,
            weight=mat,
        )
        queue.put(n)

        neighbors_ysum = copy.deepcopy(neighbors)
        n = neighbors_ysum(
            input=y_raster,
            output=y_sum_raster,
            method="average",
            size=size,
            weight=mat,
        )
        queue.put(n)

        neighbors_zsum = copy.deepcopy(neighbors)
        n = neighbors_zsum(
            input=z_raster,
            output=z_sum_raster,
            method="average",
            size=size,
            weight=mat,
        )
        queue.put(n)

    queue.wait()

    # calculate the resultant vector and final ruggedness raster
    # modified from the original script to multiply each SumRaster by the
    # number of neighborhood cells to get the sum
    gs.message("Calculating the final ruggedness rasters...")

    mapcalc = Module("r.mapcalc", run_=False)
    queue = ParallelModuleQueue(nprocs=nprocs)
    vrm_list = []

    for x_sum_raster, y_sum_raster, z_sum_raster, size in zip(
            x_sum_list, y_sum_list, z_sum_list, neighborhood_size):

        if len(neighborhood_size) > 1:
            vrm_name = "_".join([output, str(size)])
        else:
            vrm_name = output

        vrm_list.append(vrm_name)

        vrm_expr = "{x} = float(1-( (sqrt(({a}*{d})^2 + ({b}*{d})^2 + ({c}*{d})^2) / {d})))".format(
            x=vrm_name,
            a=x_sum_raster,
            b=y_sum_raster,
            c=z_sum_raster,
            d=int(size) * int(size),
        )
        mapcalc1 = copy.deepcopy(mapcalc)
        m = mapcalc1(expression=vrm_expr)
        queue.put(m)

    queue.wait()

    # set colors
    gr.colors(flags="e", map=vrm_list, color="ryb")

    # set metadata
    for vrm, size in zip(vrm_list, neighborhood_size):
        title = "Vector Ruggedness Measure (size={size})".format(size=size)
        gr.support(map=vrm, title=title)

    return 0
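create_tempname() and idw_weights() are helpers defined outside this excerpt. Plausible sketches, assuming the weights are an inverse-distance matrix written to a temporary text file in the whitespace-separated layout r.neighbors expects for weight=:

import random
import string
import numpy as np

def create_tempname(prefix):
    # Temporary map name with a random, collision-unlikely suffix.
    suffix = ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
    return prefix + suffix

def idw_weights(size, p):
    # Inverse-distance weight matrix for an odd window size, saved to a
    # temporary file usable as r.neighbors weight=.
    center = (size - 1) // 2
    y, x = np.ogrid[0:size, 0:size]
    dist = np.sqrt((x - center) ** 2 + (y - center) ** 2)
    dist[center, center] = 1.0   # avoid division by zero at the center cell
    w = 1.0 / dist ** p
    path = gs.tempfile()
    np.savetxt(path, w, fmt='%.6f')
    return path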
def main():
    elev = options["input"]
    output = options["output"]
    n_dir = int(options["ndir"])
    notParallel = flags["p"]

    global TMP_NAME, CLEANUP

    if options["basename"]:
        TMP_NAME = options["basename"]
        CLEANUP = False

    colorized_output = options["colorized_output"]
    colorize_color = options["color_table"]

    if colorized_output:
        color_raster_tmp = TMP_NAME + "_color_raster"
    else:
        color_raster_tmp = None

    color_raster_type = options["color_source"]
    color_input = options["color_input"]

    if color_raster_type == "color_input" and not color_input:
        gcore.fatal(_("Provide raster name in color_input option"))

    if color_raster_type != "color_input" and color_input:
        gcore.fatal(
            _(
                "The option color_input is not needed"
                " when not using it as source for color"
            )
        )
    # this would be needed only when no value would allowed
    if not color_raster_type and color_input:
        color_raster_type = "color_input"  # enable for convenience

    if (
        color_raster_type == "aspect"
        and colorize_color
        and colorize_color not in ["default", "aspectcolr"]
    ):
        gcore.warning(
            _(
                "Using possibly inappropriate color table <{}>"
                " for aspect"
            ).format(colorize_color)
        )

    horizon_step = 360.0 / n_dir
    horizon_intervals = np.arange(0, 360, horizon_step)
    msgr = get_msgr()

    # checks if there are already some maps
    old_maps = _get_horizon_maps()

    if old_maps:
        if not gcore.overwrite():
            CLEANUP = False
            msgr.fatal(
                _(
                    "You have to first check overwrite flag or remove"
                    " the following maps:\n"
                    "{names}"
                ).format(names=",".join(old_maps))
            )
        else:
            msgr.warning(
                _("The following maps will be overwritten: {names}").format(
                    names=",".join(old_maps)
                )
            )

    if not gcore.overwrite() and color_raster_tmp:
        check_map_name(color_raster_tmp)

    try:
        if notParallel is False:
            if options["maxdistance"]:
                maxdistance = float(options["maxdistance"])
            else:
                maxdistance = None

            r_horizon = Module(
                "r.horizon",
                elevation=elev,
                maxdistance=maxdistance,
                flags="d",
                run_=False,
            )

            queue = ParallelModuleQueue(nprocs=int(options["processes"]))

            for d in horizon_intervals:
                r_horizon_prc = deepcopy(r_horizon)
                r_horizon_prc.inputs.direction = d
                r_horizon_prc.outputs.output = TMP_NAME
                queue.put(r_horizon_prc)

            queue.wait()

        else:
            params = {}
            if options["maxdistance"]:
                params["maxdistance"] = options["maxdistance"]

            gcore.run_command(
                "r.horizon",
                elevation=elev,
                step=horizon_step,
                output=TMP_NAME,
                flags="d",
                **params
            )

        new_maps = _get_horizon_maps()

        if flags["o"]:
            msgr.message(_("Computing openness ..."))
            expr = "{out} = 1 - (sin({first}) ".format(first=new_maps[0], out=output)
            for horizon in new_maps[1:]:
                expr += "+ sin({name}) ".format(name=horizon)
            expr += ") / {n}.".format(n=len(new_maps))
        else:
            msgr.message(_("Computing skyview factor ..."))
            expr = "{out} = 1 - (sin( if({first} < 0, 0, {first}) ) ".format(
                first=new_maps[0], out=output
            )
            for horizon in new_maps[1:]:
                expr += "+ sin( if({name} < 0, 0, {name}) ) ".format(name=horizon)
            expr += ") / {n}.".format(n=len(new_maps))
        
        grast.mapcalc(exp=expr)
        gcore.run_command("r.colors", map=output, color="grey")

    except CalledModuleError:
        msgr.fatal(
            _(
                "r.horizon failed to compute horizon elevation "
                "angle maps. Please report this problem to developers."
            )
        )
        return 1

    if colorized_output:
        if color_raster_type == "slope":
            gcore.run_command("r.slope.aspect", elevation=elev, slope=color_raster_tmp)
        elif color_raster_type == "aspect":
            gcore.run_command("r.slope.aspect", elevation=elev, aspect=color_raster_tmp)
        elif color_raster_type == "dxy":
            gcore.run_command("r.slope.aspect", elevation=elev, dxy=color_raster_tmp)
        elif color_raster_type == "color_input":
            color_raster_tmp = color_input
        else:
            color_raster_tmp = elev

        # don't modify user's color table for inputs
        if colorize_color and color_raster_type not in ["input", "color_input"]:
            rcolors_flags = ""

            if flags["n"]:
                rcolors_flags += "n"

            gcore.run_command(
                "r.colors",
                map=color_raster_tmp,
                color=colorize_color,
                flags=rcolors_flags,
            )

        gcore.run_command(
            "r.shade", shade=output, color=color_raster_tmp, output=colorized_output
        )
        grast.raster_history(colorized_output)

    grast.raster_history(output)

    return 0
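_get_horizon_maps() is defined elsewhere in the module. A minimal sketch, assuming it simply lists rasters in the current mapset whose names start with the TMP_NAME basename:

def _get_horizon_maps():
    # Horizon rasters (<TMP_NAME>_<direction>) produced by r.horizon.
    mapset = gcore.gisenv()['MAPSET']
    maps = gcore.list_grouped('raster', pattern=TMP_NAME + '*').get(mapset, [])
    return sorted(maps)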
Exemple #21
def main():
    """Do the real work
    """
    #Parse remaining variables
    network_map = options['input']
    # network_mapset = network_map.split('@')[0]
    network = network_map.split('@')[1] if len(
        network_map.split('@')) > 1 else None
    suffix = options['suffix']
    layer = options['layer']
    corridor_tolerance = options['corridor_tolerance']
    cores = int(options['cores'])
    where = None if options['where'] == '' else options['where']
    weights = options['weights'].split(',')
    s_flag = flags['s']
    d_flag = flags['d']
    r_flag = flags['r']

    # soft limit on the number of open files (relevant for r.series)
    ulimit = resource.getrlimit(resource.RLIMIT_NOFILE)[0]

    net_hist_str = grass.read_command('v.info', map=network_map,
                                      flags='h').split('\n')[0].split(': ')[1]

    dist_cmd_dict = task.cmdstring_to_tuple(net_hist_str)

    dist_prefix = dist_cmd_dict[1]['prefix']
    #network_prefix = dist_cmd_dict[1]['prefix']

    #print(where)

    # in_vertices = dist_cmd_dict[1]['input']

    #Check if db-connection for edge map exists
    con = vect.vector_db(network_map)[int(layer)]
    if not con:
        grass.fatal("Database connection for map {} \
                    is not defined for layer {}.".format(network, layer))

    #Check if required columns exist and are of required type
    required_columns = ['con_id_u', 'from_p', 'to_p', 'cd_u']
    if weights:
        required_columns += weights

    in_columns = vect.vector_columns(network_map, layer=layer)

    missing_columns = np.setdiff1d(required_columns, in_columns.keys())

    if missing_columns:
        grass.fatal("Cannot find the following reqired/requested \
                    column(s) {} in vector map \
                    {}.".format(', '.join(missing_columns), network))

    #
    weight_types = []
    # Check properly if column is numeric
    for col in required_columns:
        if in_columns[col]['type'] not in [
                'INTEGER', 'DOUBLE PRECISION', 'REAL'
        ]:
            grass.fatal("Column {} is of type {}. \
                         Only numeric types (integer, \
                         real or double precision) \
                         allowed!".format(col, in_columns[col]['type']))

        if col in weights:
            weight_types.append(in_columns[col]['type'])

    # Extract necessary information on edges from the attribute table of
    # the edge map
    table_io = StringIO(
        unicode(
            grass.read_command('v.db.select',
                               flags='c',
                               map=network_map,
                               columns=required_columns,
                               separator=',',
                               where=where)))

    try:
        table_extract = np.genfromtxt(table_io,
                                      delimiter=',',
                                      dtype=None,
                                      names=required_columns)
    except Exception:
        grass.fatal('No edges selected to compute corridors for...')

    # Output result of where-clause and exit (if requested)
    if s_flag:
        print(table_extract)
        #grass.message("con_id_u|from_p|to_p")
        #for fid in $selected_edges_ud:
        #    message_text = $(echo $table_extract | tr ' ' '\n' |
        # tr ',' ' ' | awk -v FID=$fid '{if($1==FID) print $1 "|" $2 "|"
        #  $3}' | head -n 1)
        #    grass.message(message_text)
        sys.exit(0)

    #Get unique identifiers for the selected undirected edges
    selected_patches = np.unique(
        np.append(table_extract['from_p'], table_extract['to_p']))

    selected_edges = np.unique(table_extract['con_id_u'])

    # activate z-flag if more maps have to be aggregated than ulimit
    z_flag = None if len(selected_edges) < ulimit else 'z'

    #Check if cost distance raster maps exist
    pattern = "{}_patch_*_cost_dist".format(dist_prefix)
    patchmaps = grass.read_command('g.list', pattern=pattern,
                                   type='raster').rstrip('\n').split('\n')

    for patch in selected_patches:
        #Check if cost distance raster maps exist
        patchmap = "{}_patch_{}_cost_dist".format(dist_prefix, patch)
        if patchmap not in patchmaps:
            grass.fatal("Cannot find raster map {}.".format(patchmap))

    #Create mapcalculator expressions for cost distance corridors,
    # assigning distance values
    corridormaps = {}
    if d_flag:
        pattern = "{}_corridor_*_cost_dist".format(dist_prefix)
        corridor_base = 'dist'
    else:
        pattern = "{}_corridor_[0-9]+$".format(dist_prefix)
        corridor_base = 'id'

    corridormaps[corridor_base] = grass.read_command(
        'g.list', flags='e', pattern=pattern,
        type='raster').rstrip('\n').split('\n')
    for weight in weights:
        pattern = "{}_corridor_[0-9]+_{}".format(dist_prefix, weight)
        corridormaps[weight] = grass.read_command(
            'g.list', flags='e', pattern=pattern,
            type='raster').rstrip('\n').split('\n')

    # Setup GRASS modules for raster processing
    mapcalc = Module("r.mapcalc", quiet=True, run_=False)
    reclass = Module("r.reclass", rules='-', quiet=True, run_=False)
    recode = Module("r.recode", rules='-', quiet=True, run_=False)

    # Set up parallel module queues if parallel processing is requested
    #print(weight_types)
    if cores > 1:
        mapcalc_queue = ParallelModuleQueue(nprocs=cores)

        if 'INTEGER' in weight_types:
            reclass_queue = ParallelModuleQueue(nprocs=cores)

        if 'REAL' in weight_types or 'DOUBLE PRECISION' in weight_types:
            recode_queue = ParallelModuleQueue(nprocs=cores)

    corridor_list = []
    for edge_id in selected_edges:
        edge = table_extract[table_extract['con_id_u'] == edge_id][0]
        #print(e.dtype.names)
        if d_flag:
            corridor = "{}_corridor_{}_cost_dist".format(dist_prefix, edge_id)
            #corridor_list.append(corridor)
            mc_expression = "{prefix}_corridor_{CON_ID}_cost_dist=if( \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist) - \
            (({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist) * \
            {cor_tolerance}/100.0)<= \
            ({prefix}_patch_{FROM_P}_cost_dist + \
            {prefix}_patch_{TO_P}_cost_dist), \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist), \
            null())".format(prefix=dist_prefix,
                            CON_ID=edge['con_id_u'],
                            FROM_P=edge['from_p'],
                            TO_P=edge['to_p'],
                            cor_tolerance=corridor_tolerance)
        else:
            corridor = "{}_corridor_{}".format(dist_prefix, edge['con_id_u'])
            #corridor_list.append(corridor)
            # Create mapcalculator expressions for cost distance
            # corridors, assigning connection IDs for reclassification
            mc_expression = "{prefix}_corridor_{CON_ID}=if( \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist)- \
            (({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist)* \
            {cor_tolerance}/100.0)<={CD}, \
            {CON_ID}, null())".format(prefix=dist_prefix,
                                      CON_ID=edge['con_id_u'],
                                      FROM_P=edge['from_p'],
                                      TO_P=edge['to_p'],
                                      CD=edge['cd_u'],
                                      cor_tolerance=corridor_tolerance)

        corridor_list.append(corridor)
        #print(corridor)
        #print(corridormaps)

        if r_flag or corridor not in corridormaps[corridor_base]:
            new_mapcalc = copy.deepcopy(mapcalc)

            if cores > 1:
                calc = new_mapcalc(expression=mc_expression)
                mapcalc_queue.put(calc)
            else:
                calc = new_mapcalc(expression=mc_expression,
                                   region='intersect')
                calc.run()

        for weight in weights:
            if r_flag or corridor not in corridormaps[weight]:
                in_map = corridor
                out_map = '{}_{}'.format(in_map, weight)
                if in_columns[weight]['type'] == 'INTEGER':
                    new_reclass = copy.deepcopy(reclass)
                    reclass_rule = "{} = {}".format(edge['con_id_u'],
                                                    edge[weight])
                    rcl = new_reclass(input=in_map,
                                      output=out_map,
                                      stdin_=reclass_rule)

                    if cores > 1:
                        reclass_queue.put(rcl)
                    else:
                        rcl.run()

                if in_columns[weight]['type'] in ['REAL', 'DOUBLE PRECISION']:
                    new_recode = copy.deepcopy(recode)
                    recode_rule = "{0}:{0}:{1}:{1}".format(
                        edge['con_id_u'], edge[weight])
                    rco = new_recode(input=in_map,
                                     output=out_map,
                                     stdin_=recode_rule)
                    if cores > 1:
                        recode_queue.put(rco)
                    else:
                        rco.run()

    if cores > 1:
        mapcalc_queue.wait()
        if 'INTEGER' in weight_types:
            reclass_queue.wait()
        if 'REAL' in weight_types or 'DOUBLE PRECISION' in weight_types:
            recode_queue.wait()

    grass.verbose('Aggregating corridor maps...')

    if d_flag:
        grass.run_command('r.series',
                          flags=z_flag,
                          quiet=True,
                          input=','.join(corridor_list),
                          output='{}_corridors_min_cost_dist_{}'.format(
                              dist_prefix, suffix),
                          method='minimum')
    else:
        #Summarize corridors
        if not weights:
            print(','.join(corridor_list))
            output_map = '{}_corridors_count_{}'.format(dist_prefix, suffix)
            grass.run_command('r.series',
                              flags=z_flag,
                              quiet=True,
                              input=','.join(corridor_list),
                              output=output_map,
                              method='count')
            write_raster_history(output_map)

        else:
            #Weight corridors according to user requested weights
            for weight in weights:
                # Generate corridor map list
                corridor_map_list = (cm + '_{}'.format(weight)
                                     for cm in corridor_list)
                output_map = '{}_corridors_{}_sum_{}'.format(
                    dist_prefix, weight, suffix)
                #Summarize corridors using r.series
                grass.run_command('r.series',
                                  flags=z_flag,
                                  quiet=True,
                                  input=corridor_map_list,
                                  output=output_map,
                                  method='sum')
                write_raster_history(output_map)
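write_raster_history() is assumed here to be a thin wrapper that stamps the generating command into the raster metadata; a one-line sketch using grass.script's raster_history():

def write_raster_history(mapname):
    # Record the current command line in the raster's history metadata.
    grass.raster_history(mapname)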
Exemple #22
def main():

    global rm_regions, rm_rasters, rm_vectors, tmpfolder

    # parameters
    strds = options['input']
    strdsout = options['output_clouds']
    threshold = float(options['threshold'])

    test_nprocs()

    # test if necessary GRASS GIS addons are installed
    if not grass.find_program('i.sentinel.mask', '--help'):
        grass.fatal(
            _("The 'i.sentinel.mask' module was not found, install it first:")
            + "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.mask.worker', '--help'):
        grass.fatal(
            _("The 'i.sentinel.mask.worker' module was not found, install it first:"
              ) + "\n" + "g.extension i.sentinel.mask.worker url=...")

    strdsrasters = [
        x.split('|')[0]
        for x in grass.parse_command('t.rast.list', input=strds, flags='u')
    ]
    times = [
        x.split('|')[2]
        for x in grass.parse_command('t.rast.list', input=strds, flags='u')
    ]
    s2_scenes = dict()
    for strdsrast, time in zip(strdsrasters, times):
        parts = strdsrast.split('_')
        name = "%s_%s" % (parts[0], parts[1])
        band = parts[2]
        if name not in s2_scenes:
            s2_scene = {
                'B02': None,
                'B03': None,
                'B04': None,
                'B08': None,
                'B8A': None,
                'B11': None,
                'B12': None,
                'date': None
            }
            s2_scene['clouds'] = "%s_clouds" % name
            if options['output_shadows']:
                s2_scene['shadows'] = "%s_shadows" % name
            if threshold > 0 or options['output_shadows']:
                if options['metadata'] == 'default':
                    env = grass.gisenv()
                    json_standard_folder = os.path.join(
                        env['GISDBASE'], env['LOCATION_NAME'], env['MAPSET'],
                        'cell_misc')
                    s2_scene['metadata'] = os.path.join(
                        json_standard_folder, strdsrast, "description.json")
                elif options['metadata']:
                    json_standard_folder = options['metadata']
                    s2_scene['metadata'] = os.path.join(
                        json_standard_folder, strdsrast, "description.json")
            s2_scenes[name] = s2_scene
        s2_scenes[name][band] = strdsrast
        if not s2_scenes[name]['date']:
            if '.' in time:
                dateformat = '%Y-%m-%d %H:%M:%S.%f'
            else:
                dateformat = '%Y-%m-%d %H:%M:%S'
            s2_scenes[name]['date'] = datetime.strptime(time, dateformat)

    # check if all input bands are in strds
    for key in s2_scenes:
        if any([val is None for key2, val in s2_scenes[key].items()]):
            grass.fatal(_("Not all needed bands are given"))

    grass.message(_("Find clouds (and shadows) in Sentinel scenes ..."))
    env = grass.gisenv()
    start_gisdbase = env['GISDBASE']
    start_location = env['LOCATION_NAME']
    start_cur_mapset = env['MAPSET']

    queue = ParallelModuleQueue(nprocs=options['nprocs'])
    bands = ['B02', 'B03', 'B04', 'B08', 'B8A', 'B11', 'B12']
    number_of_scenes = len(s2_scenes)
    number = 0
    for s2_scene_name in s2_scenes:
        s2_scene = s2_scenes[s2_scene_name]
        number += 1
        grass.message(
            _("Processing %d of %d scenes") % (number, number_of_scenes))
        if threshold > 0:
            with open(s2_scene['metadata'], 'r') as f:
                data = json.load(f)
            if threshold > float(data['CLOUDY_PIXEL_PERCENTAGE']):
                computingClouds = False
            else:
                computingClouds = True
        else:
            computingClouds = True
        for band in bands:
            rm_rasters.append("%s_double" % s2_scene[band])
        if computingClouds:
            kwargs = dict()
            if options['output_shadows']:
                kwargs['shadow_raster'] = s2_scene['shadows']
                kwargs['metadata'] = s2_scene['metadata']
                kwargs['shadow_threshold'] = 1000
                flags = 's'
            else:
                flags = 'sc'
            newmapset = s2_scene['clouds']
            # grass.run_command(
            i_sentinel_mask = Module(
                'i.sentinel.mask.worker',
                blue="%s@%s" % (s2_scene['B02'], start_cur_mapset),
                green="%s@%s" % (s2_scene['B03'], start_cur_mapset),
                red="%s@%s" % (s2_scene['B04'], start_cur_mapset),
                nir="%s@%s" % (s2_scene['B08'], start_cur_mapset),
                nir8a="%s@%s" % (s2_scene['B8A'], start_cur_mapset),
                swir11="%s@%s" % (s2_scene['B11'], start_cur_mapset),
                swir12="%s@%s" % (s2_scene['B12'], start_cur_mapset),
                flags=flags,
                cloud_raster=s2_scene['clouds'],
                newmapset=newmapset,
                quiet=True,
                run_=False,
                **kwargs)
            queue.put(i_sentinel_mask)
    queue.wait()

    # verify that switching the mapset worked
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    cur_mapset = env['MAPSET']
    if cur_mapset != start_cur_mapset:
        grass.fatal("New mapset is <%s>, but should be <%s>" %
                    (cur_mapset, start_cur_mapset))

    # copy maps to current mapset
    for s2_scene_name in s2_scenes:
        s2_scene = s2_scenes[s2_scene_name]
        newmapset = s2_scene['clouds']
        if grass.find_file(s2_scene['clouds'],
                           element='raster',
                           mapset=newmapset)['file']:
            grass.run_command(
                'g.copy',
                raster="%s@%s,%s" %
                (s2_scene['clouds'], newmapset, s2_scene['clouds']))
        else:
            grass.run_command('r.mapcalc',
                              expression="%s = null()" % s2_scene['clouds'])
        if options['output_shadows']:
            if grass.find_file(s2_scene['shadows'],
                               element='raster',
                               mapset=newmapset)['file']:
                grass.run_command(
                    'g.copy',
                    raster="%s@%s,%s" %
                    (s2_scene['shadows'], newmapset, s2_scene['shadows']))
            else:
                grass.run_command('r.mapcalc',
                                  expression="%s = null()" %
                                  s2_scene['shadows'])
        grass.utils.try_rmdir(os.path.join(gisdbase, location, newmapset))

    grass.message(_("Create space time raster data set of clouds ..."))
    grass.run_command('t.create',
                      output=strdsout,
                      title="Sentinel-2 cloud mask",
                      desc="Sentinel-2 cloud mask",
                      quiet=True)
    # create register file
    registerfile = grass.tempfile()
    file = open(registerfile, 'w')
    for s2_scene_name in s2_scenes:
        s2_scene = s2_scenes[s2_scene_name]
        file.write("%s|%s\n" %
                   (s2_scene['clouds'],
                    s2_scene['date'].strftime("%Y-%m-%d %H:%M:%S")))
    file.close()
    grass.run_command('t.register',
                      input=strdsout,
                      file=registerfile,
                      quiet=True)
    # remove registerfile
    grass.try_remove(registerfile)

    if options['output_shadows']:
        grass.message(_("Create space time raster data set of shadows ..."))
        grass.run_command('t.create',
                          output=options['output_shadows'],
                          title="Sentinel-2 shadow mask",
                          description="Sentinel-2 shadow mask",
                          quiet=True)
        # create register file (one line per map: name|start time)
        registerfile = grass.tempfile()
        with open(registerfile, 'w') as regfile:
            for s2_scene_name in s2_scenes:
                s2_scene = s2_scenes[s2_scene_name]
                regfile.write("%s|%s\n" %
                              (s2_scene['shadows'],
                               s2_scene['date'].strftime("%Y-%m-%d %H:%M:%S")))
        grass.run_command('t.register',
                          input=options['output_shadows'],
                          file=registerfile,
                          quiet=True)
        # remove registerfile
        grass.try_remove(registerfile)
Example #23
        ms = msk.get_registered_maps(
            columns='name', where="start_time = '{}'".format(date))[0][0]
        output = '{}_{}'.format(options['basename'], idx)
        compute(b4, b8, ms, output, str(idx))

        data.append((output, date))

        idx += 1

    queue.wait()

    idx = 1
    with open(options['output'], 'w') as fd:
        for output, date in data:
            stats(output, date, fd)
            cleanup(str(idx))
            idx += 1

    return 0


if __name__ == "__main__":
    options, flags = parser()

    # queue for parallel jobs
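    # (created at module level, so main() can enqueue jobs and wait on them)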
    queue = ParallelModuleQueue(int(options['nprocs']))

    sys.exit(main())
Example #24
import os
import time

from copy import deepcopy
from osgeo import gdal

import grass.script as gscript
from grass.pygrass.modules import Module, ParallelModuleQueue


def main():
    options, flags = gscript.parser()
    input_dir = options['input']
    sizeMovingWindow = options['size']
    distance = options['distance']
    numCategories = options['categories']
    recode = flags['r']
    use_dem = flags['d']

    start_time = time.time()

    try:
        # Validate parameters
        if (int(sizeMovingWindow) % 2 == 0) or (int(sizeMovingWindow) < 3) or \
           (int(distance) >= int(sizeMovingWindow)) or (int(numCategories) > 255):
            raise ValueError
    except ValueError:
        print("The size of the moving window must be odd and >= 3.")
        print("The distance must be smaller than the size of the moving window.")
        print("The raster map cannot have more than 255 categories.")
    else:
        gscript.run_command('g.mapset', mapset='PERMANENT')

        # r=root, d=directories, f = files
        for r, d, f in os.walk(input_dir):
            print "Checking directory: ", r
            count = 1
            for file in f:
                # Import bands and set the region
                full_path = os.path.join(r, file)
                if (full_path[-8:-4] == "_reg") and (not use_dem):
                    gscript.run_command('g.proj', flags='c', georef=full_path)
                    gscript.run_command('r.in.gdal',
                                        flags='k',
                                        input=full_path,
                                        output='region_raster',
                                        overwrite=True)
                    gscript.run_command('g.region',
                                        raster='region_raster',
                                        overwrite=True)

                if (full_path[-8:-4] == "_dem") and (use_dem):
                    print "DEM: ", full_path

                    gscript.run_command('g.proj', flags='c', georef=full_path)
                    gscript.run_command('r.in.gdal',
                                        flags='k',
                                        input=full_path,
                                        output='inputBands.1',
                                        overwrite=True)
                    # set the computational region to the imported DEM
                    gscript.run_command('g.region', raster='inputBands.1')
                    output_file = full_path[:-4] + "_text.tif"
                    print "Output File to be created: ", output_file
                    count += 1

                if (full_path[-9:-5] == "band") and (not use_dem):
                    if (count == 1):
                        output_file = full_path[:-10] + "_text.tif"
                        print "Output File to be created: ", output_file
                    gscript.run_command('r.in.gdal',
                                        flags='k',
                                        input=full_path,
                                        output='inputBands.{}'.format(count),
                                        overwrite=True)
                    count += 1

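            # proceed only when the walk imported either a single DEM band
            # (count == 2) or seven spectral bands (count == 8)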
            if (count == 8) or (count == 2):

                bandCount = count - 1
                rasterName = []

                inputRescale = "inputBands.{}"
                outputRescale = "bandsRescale.{}"

                # Quantize the image so textures can be computed
                if not recode:
                    for band in range(1, bandCount + 1):
                        rules = gscript.read_command(
                            'r.quantile',
                            flags='r',
                            input=inputRescale.format(band),
                            quantiles=numCategories,
                            overwrite=True,
                            quiet=True)
                        print "Recoding raster map {}".format(band)
                        gscript.write_command(
                            'r.recode',
                            input=inputRescale.format(band),
                            output=outputRescale.format(band),
                            rules='-',
                            overwrite=True,
                            stdin=rules)
                else:
                    for band in range(1, bandCount + 1):
                        gscript.run_command('r.rescale',
                                            input=inputRescale.format(band),
                                            output=outputRescale.format(band),
                                            to='1,' + numCategories,
                                            overwrite=True)

                outputTexture = "band.{}"

                # Calculate textures using parallel jobs
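                # (the number of worker processes is hard-coded to 4 here)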
                queue = ParallelModuleQueue(nprocs=4)
                texture = Module(
                    'r.texture',
                    flags='n',
                    method=['asm', 'contrast', 'var', 'idm', 'entr'],
                    size=sizeMovingWindow,
                    distance=distance,
                    overwrite=True,
                    run_=False)

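                # one deep copy of the template module per band; the queue
                # executes them as worker slots become available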
                for band in range(1, bandCount + 1):
                    m = deepcopy(texture)(input=outputRescale.format(band),
                                          output=outputTexture.format(band))
                    queue.put(m)
                queue.wait()

                # Generate group to export the tiff image. This is required to stack all the bands in the same file.
                for band in range(1, bandCount + 1):
                    rasterName.extend([
                        outputTexture.format(band) + '_ASM',
                        outputTexture.format(band) + '_Contr',
                        outputTexture.format(band) + '_Var',
                        outputTexture.format(band) + '_IDM',
                        outputTexture.format(band) + '_Entr'
                    ])
                    groupInput = ','.join(rasterName[-5:])
                    gscript.run_command('i.group',
                                        group='outFileBands',
                                        input=groupInput,
                                        quiet=True)

                # Create the output tiff image
                gscript.run_command('r.out.gdal',
                                    flags='cm',
                                    input='outFileBands',
                                    output=output_file,
                                    format='GTiff',
                                    type='Float32',
                                    overwrite=True,
                                    verbose=True)

                print "Finished creating features"

                # Edit metadata to give a name to each band
                print("Renaming raster bands")
                # open in update mode, otherwise the descriptions are not saved
                src_ds = gdal.Open(output_file, gdal.GA_Update)
                for band in range(1, src_ds.RasterCount + 1):
                    src_ds.GetRasterBand(band).SetDescription(rasterName[band - 1])
                src_ds = None  # close the dataset to flush the changes to disk

        elapsed_time = time.time() - start_time
        print "Finished in: ", elapsed_time, " seconds"