def sample_absolute(input, layer, timestamp_column, column, t_raster,
                    where, i_flag):
    """Point sampling in STRDS with absolute temporal type

    Sample the raster map described by *t_raster* at those points of the
    vector map *input* whose timestamps fall inside the raster's validity
    interval, writing the sampled values into attribute *column*.

    :param input: input vector points map (its attribute table is updated)
    :param layer: layer of the vector map whose table is updated
    :param timestamp_column: attribute column holding the point timestamps
    :param column: attribute column receiving the sampled raster values
    :param t_raster: dict describing the raster map; keys used here are
        "start_time", "end_time", "name" and "mapset"
    :param where: SQL WHERE fragment the time-window condition is appended to
    :param i_flag: value assigned to the v.what.rast "i" flag
    """
    start = t_raster["start_time"]
    end = t_raster["end_time"]
    raster_map = '{}@{}'.format(t_raster["name"], t_raster["mapset"])
    # Restrict sampling to points with start <= timestamp < end
    # (half-open interval, matching the raster's validity period).
    where += """({0} >= date('{1}') AND \
                {0} < date('{2}'))""".format(timestamp_column, start, end)

    grass.verbose(_('Sampling points between {} and {}'.format(start, end)))

    # Move the computational region temporarily to where the matching points
    # are in space and time (faster e.g. for tracking data); v.db.select -r
    # reports the spatial extent (n/s/e/w) of the selected records.
    treg = grass.parse_command('v.db.select', flags='r',
                               map=input, where=where, quiet=True)

    # NOTE(review): the set() test presumably skips degenerate extents where
    # all n/s/e/w values coincide (no point or a single point matched the
    # time window) -- confirm against v.db.select -r output.
    if len(set(treg.values())) > 1:
        grass.use_temp_region()
        grass.run_command("g.region", n=treg['n'], s=treg['s'], e=treg['e'],
                          w=treg['w'], align=raster_map)

        # Sample spatio-temporally matching points and raster map
        rast_what = Module('v.what.rast', map=input, layer=layer,
                           column=column, raster=raster_map, where=where,
                           stderr_=DEVNULL, run_=False, quiet=True)
        rast_what.flags.i = i_flag
        rast_what.run()
Ejemplo n.º 2
0
def run_command(*args, **kwargs):
    """Build a pygrass Module, execute it explicitly and return it.

    Forces ``run_=False`` and ``quiet=True`` regardless of what the caller
    passed, so the module is always constructed deferred and then run here.
    """
    kwargs.update(run_=False, quiet=True)
    module = Module(*args, **kwargs)
    module.run()
    return module
def main(argv):
    """Compute daily global irradiance rasters with r.sun and export them.

    :param argv: CLI arguments: [step, elevation_map, start_day, stop_day]
        step      -- r.sun time step (float, decimal hours)
        elevin    -- name of the input elevation raster
        start_day -- first day of year to process (int)
        stop_day  -- day of year to stop at; note range() makes this
                     exclusive -- TODO confirm that is intended
    """
    # BUGFIX: was a Python 2 print statement, a syntax error under Python 3.
    print('computing ')
    step = float(argv[0])
    elevin = argv[1]
    start_day = int(argv[2])
    stop_day = int(argv[3])
    # Output folder: <two levels above cwd>/raw_data/rn
    parent_dir = os.path.dirname(os.path.dirname(os.getcwd()))
    out_folder = os.path.join(parent_dir, "raw_data", "rn")

    # Compute irradiance for each day (stop_day itself is excluded).
    for day in range(start_day, stop_day):
        rsun = Module("r.sun", elevation=elevin, step=step,
                      glob_rad='rad.global.' + str(day),
                      overwrite=True, run_=False, day=day)
        rsun.run()
        # r.out.gdal GeoTIFF does not include the datum; use the AAIGrid
        # format for the output raster instead.
        rout = Module("r.out.gdal", overwrite=True, f=True,
                      input='rad.global.' + str(day),
                      output=os.path.join(out_folder, 'rn' + str(day)),
                      format="AAIGrid")
        # Remove the intermediate irradiance raster.
        gremove = Module("g.remove", f=True, type='raster',
                         name='rad.global.' + str(day))
Ejemplo n.º 4
0
def main():
    """Render a legend for a raster map to an image file with d.legend.

    All parameters come from the module-level ``options`` and ``flags``
    dictionaries (GRASS parser output). The function computes the image
    size from the requested bar dimensions, unit and resolution, renders
    the legend via the chosen display driver and reports the resulting
    image dimensions on standard output.
    """
    # parameters - file name and extension
    outputfile = options['file']
    ext = outputfile.split('.')
    if len(ext) == 1:
        grass.fatal("Please provide the file extension of the output file")
    filetype = options['filetype']
    if filetype == 'cairo':
        # BUGFIX: every entry needs its leading dot; without it e.g.
        # "xppm" would be accepted by endswith().
        allowed = ('.png', '.bmp', '.ppm', '.pdf', '.ps', '.svg')
        if not outputfile.lower().endswith(allowed):
            grass.fatal("Unknown display driver <{}>".format(ext[1]))
    # NOTE(review): ext[1] is the true extension only for single-dot file
    # names; "a.b.png" would compare "b" here -- confirm expected input.
    if filetype == "ps" and not ext[1] == "ps":
        grass.fatal("The file type <{}> does not match the file extension <"
                    "{}>".format(filetype, ext[1]))
    if filetype == "png" and not ext[1] == "png":
        grass.fatal("The file type <{}> does not match the file extension <"
                    "{}>".format(filetype, ext[1]))

    # parameters - image settings
    unit = options['unit']
    resol = options['resolution']
    if resol == '':
        # Defaults: 96 ppi for pixel units, 300 ppi for print units.
        if unit == 'px':
            resol = 96
        else:
            resol = 300
    else:
        resol = int(resol)
    dimensions = options['dimensions']
    width, height = dimensions.split(",")
    bgcolor = options['color']
    inmap = options['raster']
    labelnum = options['labelnum']
    vr = options['range']
    font = options['font']
    fontsize = int(options['fontsize'])
    digits = int(options['digits'])
    labval = options['label_values']
    labstep = options['label_step']

    # flag parameters
    flag_f = flags['f']
    flag_d = flags['d']
    flag_t = flags['t']
    # Extra margin to leave room for tick marks when -t is set.
    if flag_t:
        tagmargin = 9
    else:
        tagmargin = 4

    # Compute output size of legend bar in pixels
    if unit == 'cm':
        bw = math.ceil(float(width) / 2.54 * float(resol))
        bh = math.ceil(float(height) / 2.54 * float(resol))
    elif unit == 'mm':
        bw = math.ceil(float(width) / 25.4 * float(resol))
        bh = math.ceil(float(height) / 25.4 * float(resol))
    elif unit == 'inch':
        bw = math.ceil(float(width) * float(resol))
        bh = math.ceil(float(height) * float(resol))
    elif unit == "px":
        bw = float(width)
        bh = float(height)
    else:
        # BUGFIX: was grass.error(), which does not abort, so execution
        # continued and crashed later with undefined bw/bh. fatal() stops.
        grass.fatal('Unit must be inch, cm, mm or px')

    # Add size of legend to w or h, if flag_d is set
    # Add size of tics
    if flag_d:
        histmargin = 2.75
    else:
        histmargin = 1
    if float(height) > float(width):
        w = bw * histmargin + tagmargin
        h = bh + 4
    else:
        h = bh * histmargin + tagmargin
        w = bw + 4

    # Determine the font size in pixels at the chosen resolution
    if fontsize == 0:
        fz = 1
    else:
        fz = round(float(fontsize) * (float(resol) / 72.272))

    # Determine space at left and right (or top and bottom)
    # based on fontsize (fz) and number of digits
    maprange = grass.raster_info(inmap)
    maxval = round(maprange['max'], digits)
    minval = round(maprange['min'], digits)
    if maxval < 1:
        maxl = len(str(maxval)) - 1
    else:
        maxl = len(str(maxval)) - 2
    if minval < 1:
        minl = len(str(minval)) - 1
    else:
        minl = len(str(minval)) - 2
    margin_left = 0.5 * minl * fz
    margin_right = 0.5 * maxl * fz

    # Page width and height (iw, ih)
    # Position bar in percentage (*margin)
    # Here we take into account the extra space for the numbers and ticks

    if float(height) > float(width):
        iw = w + fz * maxl
        ih = h + margin_left + margin_right
        bmargin = str(margin_left / ih * 100)
        tmargin = str(100 - (margin_right / ih * 100))
        rmargin = str(100 * (w - tagmargin) / iw - 1)
        if flag_d:
            lmargin = str((2 + (bw * 1.75)) / iw * 100)
        else:
            lmargin = str(2 / iw * 100)
    else:
        iw = w + margin_left + margin_right
        ih = h + fz * 1.5
        bmargin = str((2 + tagmargin + fz * 1.5) / ih * 100)
        if flag_d:
            tmargin = str(100 - (2 + (bh * 1.75)) / ih * 100)
        else:
            tmargin = str(100 - 2 / ih * 100)
        lmargin = str(margin_left / iw * 100)
        rmargin = str(100 - margin_right / iw * 100)
    at = (bmargin, tmargin, lmargin, rmargin)

    # Open file connection, set font
    os.environ['GRASS_RENDER_IMMEDIATE'] = filetype
    os.environ['GRASS_RENDER_FILE'] = outputfile
    os.environ['GRASS_RENDER_HEIGHT'] = str(ih)
    os.environ['GRASS_RENDER_WIDTH'] = str(iw)
    if bgcolor == 'none':
        os.environ['GRASS_RENDER_TRANSPARENT'] = "TRUE"
    else:
        os.environ['GRASS_RENDER_BACKGROUNDCOLOR'] = bgcolor
    # Assemble the d.legend flag string from the module flags
    if flag_f and fontsize == 0:
        flag = 'cfsv'
    elif flag_f:
        flag = 'fsv'
    elif fontsize == 0:
        flag = 'csv'
    else:
        flag = 'sv'
    if flag_d:
        flag = flag + 'd'
    if flag_t:
        flag = flag + 't'

    # Write legend with various options
    d_legend = Module("d.legend",
                      flags=flag,
                      raster=inmap,
                      font=font,
                      at=at,
                      fontsize=fz,
                      labelnum=labelnum,
                      run_=False)
    if vr:
        val_range = list(map(float, vr.split(',')))
        d_legend.inputs.range = val_range
    if labval:
        label_values = list(map(float, labval.split(',')))
        d_legend.inputs.label_values = label_values
    if labstep:
        label_step = float(labstep)
        d_legend.inputs.label_step = label_step
    d_legend.run()

    # Set image resolution
    # NOTE(review): `found` is not defined in this function; it presumably
    # comes from a module-level PIL/Pillow import guard -- confirm.
    if found and outputfile.lower().endswith(('.png', '.bmp')):
        im = Image.open(outputfile)
        im.save(outputfile, dpi=(resol, resol))

    # Provide information about image on standard output
    grass.message("----------------------------\n")
    grass.message("File saved as {}".format(outputfile))
    grass.message("The image dimensions are:\n")
    grass.message("{} px wide and {} px high\n".format(
        str(int(iw)), str(int(ih))))
    if unit == 'inch':
        wr = round(iw / resol, 3)
        hr = round(ih / resol, 3)
    elif unit == 'cm':
        wr = round(iw / resol * 2.54, 3)
        hr = round(ih / resol * 2.54, 3)
    elif unit == 'mm':
        wr = round(iw / resol * 2.54 * 10, 3)
        hr = round(ih / resol * 2.54 * 10, 3)
    else:
        wr = "same"
    if wr != "same":
        grass.message("at a resolution of {} ppi this is:".format(str(resol)))
        grass.message("{0} {2} x {1} {2}\n".format(str(wr), str(hr), unit))
    grass.message("----------------------------\n")
Ejemplo n.º 5
0
def main():
    """Compute cyclic accumulations of a space time raster dataset.

    All parameters come from the module-level ``options`` and ``flags``
    dictionaries (GRASS parser output). The input STRDS is processed in
    temporal cycles; for each granule of a cycle, r.series.accumulate is
    run (optionally bounded by lower/upper threshold STRDS), chaining each
    result as the basemap of the next, and the resulting maps are
    registered in the output STRDS.
    """
    # lazy imports
    import grass.temporal as tgis
    from grass.pygrass.modules import Module

    # Get the options
    input = options["input"]
    output = options["output"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    lower = options["lower"]
    upper = options["upper"]
    offset = options["offset"]
    limits = options["limits"]
    shift = options["shift"]
    scale = options["scale"]
    method = options["method"]
    granularity = options["granularity"]
    register_null = flags["n"]
    reverse = flags["r"]
    time_suffix = options["suffix"]

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)

    if input_strds.is_in_db() == False:
        dbif.close()
        grass.fatal(_("Space time raster dataset <%s> not found") % (id))

    input_strds.select(dbif)

    if output.find("@") >= 0:
        out_id = output
    else:
        out_id = output + "@" + mapset

    # The output space time raster dataset
    output_strds = tgis.SpaceTimeRasterDataset(out_id)
    if output_strds.is_in_db(dbif):
        if not grass.overwrite():
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is already in the "
                  "database, use overwrite flag to overwrite") % out_id)

    if tgis.check_granularity_string(granularity,
                                     input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid granularity"))

    if tgis.check_granularity_string(cycle,
                                     input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid cycle"))

    if offset:
        if tgis.check_granularity_string(
                offset, input_strds.get_temporal_type()) == False:
            dbif.close()
            grass.fatal(_("Invalid offset"))

    # The lower threshold space time raster dataset
    if lower:
        # NOTE(review): `range` here is the Python builtin (always truthy),
        # so this check can never fire; it was probably meant to test a
        # module option -- confirm against the module's parser definition.
        if not range:
            dbif.close()
            grass.fatal(
                _("You need to set the range to compute the occurrence"
                  " space time raster dataset"))

        if lower.find("@") >= 0:
            lower_id = lower
        else:
            lower_id = lower + "@" + mapset

        lower_strds = tgis.SpaceTimeRasterDataset(lower_id)
        if lower_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (lower_strds.get_id()))

        if lower_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and lower strds must be equal")
            )

        lower_strds.select(dbif)

    # The upper threshold space time raster dataset
    if upper:
        if not lower:
            dbif.close()
            grass.fatal(
                _("The upper option works only in conjunction with the lower option"
                  ))

        if upper.find("@") >= 0:
            # BUGFIX: was `upper = upper`, which left upper_id undefined
            # whenever the dataset name already carried a mapset suffix.
            upper_id = upper
        else:
            upper_id = upper + "@" + mapset

        upper_strds = tgis.SpaceTimeRasterDataset(upper_id)
        if upper_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> not found") %
                (upper_strds.get_id()))

        if upper_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(
                _("Temporal type of input strds and upper strds must be equal")
            )

        upper_strds.select(dbif)

    input_strds_start, input_strds_end = input_strds.get_temporal_extent_as_tuple()

    # Parse start/stop according to the dataset's temporal type
    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
        start = tgis.adjust_datetime_to_granularity(start, granularity)
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle

    limit_relations = [
        "EQUALS", "DURING", "OVERLAPS", "OVERLAPPING", "CONTAINS"
    ]

    count = 1
    output_maps = []

    def _advance_cycle(end):
        # Return (start, end) of the cycle following *end*, honouring the
        # optional offset between consecutive cycles.
        if input_strds.is_time_absolute():
            new_start = end
            if offset:
                new_start = tgis.increment_datetime_by_string(end, offset)
            new_end = tgis.increment_datetime_by_string(new_start, cycle)
        else:
            new_start = end
            if offset:
                new_start = end + offset
            new_end = new_start + cycle
        return new_start, new_end

    while input_strds_end > start and stop > start:

        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'" % (str(start),
                                                                  str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        grass.message(_("Processing cycle %s - %s" % (str(start), str(end))))

        if len(input_maps) == 0:
            # BUGFIX: the bare `continue` skipped the cycle increment at the
            # bottom of the loop, spinning forever on an empty cycle.
            start, end = _advance_cycle(end)
            continue

        # Lets create a dummy list of maps with granularity conform intervals
        gran_list = []
        gran_list_low = []
        gran_list_up = []
        gran_start = start
        while gran_start < end:
            map = input_strds.get_new_map_instance("%i@%i" % (count, count))
            if input_strds.is_time_absolute():
                gran_end = tgis.increment_datetime_by_string(
                    gran_start, granularity)
                map.set_absolute_time(gran_start, gran_end)
                gran_start = tgis.increment_datetime_by_string(
                    gran_start, granularity)
            else:
                gran_end = gran_start + granularity
                map.set_relative_time(gran_start, gran_end,
                                      input_strds.get_relative_time_unit())
                gran_start = gran_start + granularity
            gran_list.append(copy(map))
            gran_list_low.append(copy(map))
            gran_list_up.append(copy(map))
        # Lists to compute the topology with upper and lower datasets

        # Create the topology between the granularity conform list and all maps
        # of the current cycle
        gran_topo = tgis.SpatioTemporalTopologyBuilder()
        gran_topo.build(gran_list, input_maps)

        if lower:
            lower_maps = lower_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_lower_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_lower_topo.build(gran_list_low, lower_maps)

        if upper:
            upper_maps = upper_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_upper_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_upper_topo.build(gran_list_up, upper_maps)

        old_map_name = None

        # Aggregate
        num_maps = len(gran_list)

        for i in range(num_maps):
            if reverse:
                map = gran_list[num_maps - i - 1]
            else:
                map = gran_list[i]
            # Select input maps based on temporal topology relations
            input_maps = []
            if map.get_equal():
                input_maps += map.get_equal()
            elif map.get_contains():
                input_maps += map.get_contains()
            elif map.get_overlaps():
                input_maps += map.get_overlaps()
            elif map.get_overlapped():
                input_maps += map.get_overlapped()
            elif map.get_during():
                input_maps += map.get_during()

            # Check input maps
            if len(input_maps) == 0:
                continue

            # New output map: suffix per the suffix option and temporal type
            if input_strds.get_temporal_type(
            ) == 'absolute' and time_suffix == 'gran':
                suffix = tgis.create_suffix_from_datetime(
                    map.temporal_extent.get_start_time(),
                    input_strds.get_granularity())
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            elif input_strds.get_temporal_type(
            ) == 'absolute' and time_suffix == 'time':
                suffix = tgis.create_time_suffix(map)
                output_map_name = "{ba}_{su}".format(ba=base, su=suffix)
            else:
                output_map_name = tgis.create_numeric_suffix(
                    base, count, time_suffix)

            output_map_id = map.build_id(output_map_name, mapset)
            output_map = input_strds.get_new_map_instance(output_map_id)

            # Check if new map is in the temporal database
            if output_map.is_in_db(dbif):
                if grass.overwrite():
                    # Remove the existing temporal database entry
                    output_map.delete(dbif)
                    output_map = input_strds.get_new_map_instance(
                        output_map_id)
                else:
                    grass.fatal(
                        _("Map <%s> is already registered in the temporal"
                          " database, use overwrite flag to overwrite.") %
                        (output_map.get_map_id()))

            map_start, map_end = map.get_temporal_extent_as_tuple()

            if map.is_time_absolute():
                output_map.set_absolute_time(map_start, map_end)
            else:
                output_map.set_relative_time(map_start, map_end,
                                             map.get_relative_time_unit())

            limits_vals = limits.split(",")
            limits_lower = float(limits_vals[0])
            limits_upper = float(limits_vals[1])

            # Pick the lower threshold map temporally matching this granule
            lower_map_name = None
            if lower:
                relations = gran_list_low[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        lower_map_name = str(relations[relation][0].get_id())
                        break

            # Pick the upper threshold map temporally matching this granule
            upper_map_name = None
            if upper:
                relations = gran_list_up[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        upper_map_name = str(relations[relation][0].get_id())
                        break

            input_map_names = []
            for input_map in input_maps:
                input_map_names.append(input_map.get_id())

            # Set up the module
            accmod = Module("r.series.accumulate",
                            input=input_map_names,
                            output=output_map_name,
                            run_=False)

            # Chain the previous cycle result as basemap
            if old_map_name:
                accmod.inputs["basemap"].value = old_map_name
            if lower_map_name:
                accmod.inputs["lower"].value = lower_map_name
            if upper_map_name:
                accmod.inputs["upper"].value = upper_map_name

            accmod.inputs["limits"].value = (limits_lower, limits_upper)

            if shift:
                accmod.inputs["shift"].value = float(shift)

            if scale:
                accmod.inputs["scale"].value = float(scale)

            if method:
                accmod.inputs["method"].value = method

            print(accmod)
            accmod.run()

            if accmod.popen.returncode != 0:
                dbif.close()
                grass.fatal(_("Error running r.series.accumulate"))

            output_maps.append(output_map)
            old_map_name = output_map_name
            count += 1

        # Increment the cycle
        start, end = _advance_cycle(end)

    # Insert the maps into the output space time dataset
    if output_strds.is_in_db(dbif):
        if grass.overwrite():
            output_strds.delete(dbif)
            output_strds = input_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = input_strds.get_initial_values()
    output_strds.set_initial_values(temporal_type, semantic_type, title,
                                    description)
    output_strds.insert(dbif)

    empty_maps = []
    # Register the maps in the database
    count = 0
    for output_map in output_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, len(output_maps), 1)
        # Read the raster map data
        output_map.load()
        # In case of a empty map continue, do not register empty maps

        if not register_null:
            if output_map.metadata.get_min() is None and \
                output_map.metadata.get_max() is None:
                empty_maps.append(output_map)
                continue

        # Insert map in temporal database
        output_map.insert(dbif)
        output_strds.register_map(output_map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    output_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    dbif.close()

    # Remove empty maps
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove",
                              flags='f',
                              type="raster",
                              name=map.get_name(),
                              quiet=True)
Ejemplo n.º 6
0
def main():
    """Compute cyclic accumulations of a space time raster dataset.

    Older variant of the accumulation workflow (numeric map suffixes only).
    All parameters come from the module-level ``options`` and ``flags``
    dictionaries. The input STRDS is processed in temporal cycles; for each
    granule r.series.accumulate is run (optionally bounded by lower/upper
    threshold STRDS), chaining each result as the basemap of the next, and
    the resulting maps are registered in the output STRDS.
    """
    # Get the options
    input = options["input"]
    output = options["output"]
    start = options["start"]
    stop = options["stop"]
    base = options["basename"]
    cycle = options["cycle"]
    lower = options["lower"]
    upper = options["upper"]
    offset = options["offset"]
    limits = options["limits"]
    shift = options["shift"]
    scale = options["scale"]
    method = options["method"]
    granularity = options["granularity"]
    register_null = flags["n"]
    reverse = flags["r"]

    # Make sure the temporal database exists
    tgis.init()

    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    mapset = tgis.get_current_mapset()

    if input.find("@") >= 0:
        id = input
    else:
        id = input + "@" + mapset

    input_strds = tgis.SpaceTimeRasterDataset(id)

    if input_strds.is_in_db() == False:
        dbif.close()
        grass.fatal(_("Space time raster dataset <%s> not found") % (id))

    input_strds.select(dbif)

    if output.find("@") >= 0:
        out_id = output
    else:
        out_id = output + "@" + mapset

    # The output space time raster dataset
    output_strds = tgis.SpaceTimeRasterDataset(out_id)
    if output_strds.is_in_db(dbif):
        if not grass.overwrite():
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> is already in the "
                          "database, use overwrite flag to overwrite") % out_id)

    if tgis.check_granularity_string(granularity,
                                     input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid granularity"))

    if tgis.check_granularity_string(cycle,
                                     input_strds.get_temporal_type()) == False:
        dbif.close()
        grass.fatal(_("Invalid cycle"))

    if offset:
        if tgis.check_granularity_string(offset,
                                         input_strds.get_temporal_type()) == False:
            dbif.close()
            grass.fatal(_("Invalid offset"))

    # The lower threshold space time raster dataset
    if lower:
        # NOTE(review): `range` here is the Python builtin (always truthy),
        # so this check can never fire; it was probably meant to test a
        # module option -- confirm against the module's parser definition.
        if not range:
            dbif.close()
            grass.fatal(_("You need to set the range to compute the occurrence"
                          " space time raster dataset"))

        if lower.find("@") >= 0:
            lower_id = lower
        else:
            lower_id = lower + "@" + mapset

        lower_strds = tgis.SpaceTimeRasterDataset(lower_id)
        if lower_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> not found") % (lower_strds.get_id()))

        if lower_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Temporal type of input strds and lower strds must be equal"))

        lower_strds.select(dbif)

    # The upper threshold space time raster dataset
    if upper:
        if not lower:
            dbif.close()
            grass.fatal(_("The upper option works only in conjunction with the lower option"))

        if upper.find("@") >= 0:
            # BUGFIX: was `upper = upper`, which left upper_id undefined
            # whenever the dataset name already carried a mapset suffix.
            upper_id = upper
        else:
            upper_id = upper + "@" + mapset

        upper_strds = tgis.SpaceTimeRasterDataset(upper_id)
        if upper_strds.is_in_db() == False:
            dbif.close()
            grass.fatal(_("Space time raster dataset <%s> not found") % (upper_strds.get_id()))

        if upper_strds.get_temporal_type() != input_strds.get_temporal_type():
            dbif.close()
            grass.fatal(_("Temporal type of input strds and upper strds must be equal"))

        upper_strds.select(dbif)

    input_strds_start, input_strds_end = input_strds.get_temporal_extent_as_tuple()

    # Parse start/stop according to the dataset's temporal type
    if input_strds.is_time_absolute():
        start = tgis.string_to_datetime(start)
        if stop:
            stop = tgis.string_to_datetime(stop)
        else:
            stop = input_strds_end
        start = tgis.adjust_datetime_to_granularity(start, granularity)
    else:
        start = int(start)
        if stop:
            stop = int(stop)
        else:
            stop = input_strds_end

    if input_strds.is_time_absolute():
        end = tgis.increment_datetime_by_string(start, cycle)
    else:
        end = start + cycle

    limit_relations = ["EQUALS", "DURING", "OVERLAPS", "OVERLAPPING", "CONTAINS"]

    count = 1
    output_maps = []

    def _advance_cycle(end):
        # Return (start, end) of the cycle following *end*, honouring the
        # optional offset between consecutive cycles.
        if input_strds.is_time_absolute():
            new_start = end
            if offset:
                new_start = tgis.increment_datetime_by_string(end, offset)
            new_end = tgis.increment_datetime_by_string(new_start, cycle)
        else:
            new_start = end
            if offset:
                new_start = end + offset
            new_end = new_start + cycle
        return new_start, new_end

    while input_strds_end > start and stop > start:

        # Make sure that the cyclic computation will stop at the correct time
        if stop and end > stop:
            end = stop

        where = "start_time >= \'%s\' AND start_time < \'%s\'" % (str(start),
                                                                  str(end))
        input_maps = input_strds.get_registered_maps_as_objects(where=where,
                                                                dbif=dbif)

        grass.message(_("Processing cycle %s - %s" % (str(start), str(end))))

        if len(input_maps) == 0:
            # BUGFIX: the bare `continue` skipped the cycle increment at the
            # bottom of the loop, spinning forever on an empty cycle.
            start, end = _advance_cycle(end)
            continue

        # Lets create a dummy list of maps with granularity conform intervals
        gran_list = []
        gran_list_low = []
        gran_list_up = []
        gran_start = start
        while gran_start < end:
            map = input_strds.get_new_map_instance("%i@%i" % (count, count))
            if input_strds.is_time_absolute():
                gran_end = tgis.increment_datetime_by_string(gran_start,
                                                             granularity)
                map.set_absolute_time(gran_start, gran_end)
                gran_start = tgis.increment_datetime_by_string(gran_start,
                                                               granularity)
            else:
                gran_end = gran_start + granularity
                map.set_relative_time(gran_start, gran_end,
                                      input_strds.get_relative_time_unit())
                gran_start = gran_start + granularity
            gran_list.append(copy(map))
            gran_list_low.append(copy(map))
            gran_list_up.append(copy(map))
        # Lists to compute the topology with upper and lower datasets

        # Create the topology between the granularity conform list and all maps
        # of the current cycle
        gran_topo = tgis.SpatioTemporalTopologyBuilder()
        gran_topo.build(gran_list, input_maps)

        if lower:
            lower_maps = lower_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_lower_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_lower_topo.build(gran_list_low, lower_maps)

        if upper:
            upper_maps = upper_strds.get_registered_maps_as_objects(dbif=dbif)
            gran_upper_topo = tgis.SpatioTemporalTopologyBuilder()
            gran_upper_topo.build(gran_list_up, upper_maps)

        old_map_name = None

        # Aggregate
        num_maps = len(gran_list)

        # BUGFIX: xrange is Python 2 only; use range for Python 3.
        for i in range(num_maps):
            if reverse:
                map = gran_list[num_maps - i - 1]
            else:
                map = gran_list[i]
            # Select input maps based on temporal topology relations
            input_maps = []
            if map.get_equal():
                input_maps += map.get_equal()
            elif map.get_contains():
                input_maps += map.get_contains()
            elif map.get_overlaps():
                input_maps += map.get_overlaps()
            elif map.get_overlapped():
                input_maps += map.get_overlapped()
            elif map.get_during():
                input_maps += map.get_during()

            # Check input maps
            if len(input_maps) == 0:
                continue

            # New output map
            output_map_name = "%s_%i" % (base, count)
            output_map_id = map.build_id(output_map_name, mapset)
            output_map = input_strds.get_new_map_instance(output_map_id)

            # Check if new map is in the temporal database
            if output_map.is_in_db(dbif):
                if grass.overwrite():
                    # Remove the existing temporal database entry
                    output_map.delete(dbif)
                    output_map = input_strds.get_new_map_instance(output_map_id)
                else:
                    grass.fatal(_("Map <%s> is already registered in the temporal"
                                 " database, use overwrite flag to overwrite.") %
                                (output_map.get_map_id()))

            map_start, map_end = map.get_temporal_extent_as_tuple()

            if map.is_time_absolute():
                output_map.set_absolute_time(map_start, map_end)
            else:
                output_map.set_relative_time(map_start, map_end,
                                             map.get_relative_time_unit())

            limits_vals = limits.split(",")
            limits_lower = float(limits_vals[0])
            limits_upper = float(limits_vals[1])

            # Pick the lower threshold map temporally matching this granule
            lower_map_name = None
            if lower:
                relations = gran_list_low[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        lower_map_name = str(relations[relation][0].get_id())
                        break

            # Pick the upper threshold map temporally matching this granule
            upper_map_name = None
            if upper:
                relations = gran_list_up[i].get_temporal_relations()
                for relation in limit_relations:
                    if relation in relations:
                        upper_map_name = str(relations[relation][0].get_id())
                        break

            input_map_names = []
            for input_map in input_maps:
                input_map_names.append(input_map.get_id())

            # Set up the module
            accmod = Module("r.series.accumulate", input=input_map_names,
                            output=output_map_name, run_=False)

            # Chain the previous cycle result as basemap
            if old_map_name:
                accmod.inputs["basemap"].value = old_map_name
            if lower_map_name:
                accmod.inputs["lower"].value = lower_map_name
            if upper_map_name:
                accmod.inputs["upper"].value = upper_map_name

            accmod.inputs["limits"].value = (limits_lower, limits_upper)

            if shift:
                accmod.inputs["shift"].value = float(shift)

            if scale:
                accmod.inputs["scale"].value = float(scale)

            if method:
                accmod.inputs["method"].value = method

            # BUGFIX: was a Python 2 print statement.
            print(accmod)
            accmod.run()

            if accmod.popen.returncode != 0:
                dbif.close()
                grass.fatal(_("Error running r.series.accumulate"))

            output_maps.append(output_map)
            old_map_name = output_map_name
            count += 1

        # Increment the cycle
        start, end = _advance_cycle(end)

    # Insert the maps into the output space time dataset
    if output_strds.is_in_db(dbif):
        if grass.overwrite():
            output_strds.delete(dbif)
            output_strds = input_strds.get_new_instance(out_id)

    temporal_type, semantic_type, title, description = input_strds.get_initial_values()
    output_strds.set_initial_values(temporal_type, semantic_type, title,
                                    description)
    output_strds.insert(dbif)

    empty_maps = []
    # Register the maps in the database
    count = 0
    for output_map in output_maps:
        count += 1
        if count % 10 == 0:
            grass.percent(count, len(output_maps), 1)
        # Read the raster map data
        output_map.load()
        # In case of a empty map continue, do not register empty maps

        if not register_null:
            if output_map.metadata.get_min() is None and \
                output_map.metadata.get_max() is None:
                empty_maps.append(output_map)
                continue

        # Insert map in temporal database
        output_map.insert(dbif)
        output_strds.register_map(output_map, dbif)

    # Update the spatio-temporal extent and the metadata table entries
    output_strds.update_from_registered_maps(dbif)
    grass.percent(1, 1, 1)

    dbif.close()

    # Remove empty maps
    # NOTE(review): this variant uses type="rast" and pattern= (older g.remove
    # interface) while the newer variant uses type="raster"/name= -- confirm
    # which GRASS version this targets.
    if len(empty_maps) > 0:
        for map in empty_maps:
            grass.run_command("g.remove", flags='f', type="rast",  pattern=map.get_name(), quiet=True)
Ejemplo n.º 7
0
def import2grass(files, args, datefmt="%Y%m", mapset_fmt="%Y_%m",
                 raster_fmt="%Y_%m", input_fmt="NETCDF:{input_file}",
                 **kwargs):
    """Import files into GRASS as raster maps via ``r.in.gdal``.

    :param files: iterable of ``(directory, filename)`` tuples to import.
    :param args: parsed CLI namespace; reads ``nprocs``, ``grassdata``,
        ``location``, ``mapset``, ``rename``, ``convert`` and ``log``.
    :param datefmt: strptime format used by ``extract_date`` to pull the
        date out of a filename.
    :param mapset_fmt: strftime format for the per-date mapset name;
        pass a falsy value to import everything into ``args.mapset``.
    :param raster_fmt: strftime format for the date suffix of the raster
        map name (``<base>_<date>``).
    :param input_fmt: template for the GDAL input string (e.g. a NETCDF
        subdataset prefix).
    :param kwargs: extra options forwarded verbatim to ``r.in.gdal``.
    :return: dict mapping each base name to the list of dates imported.
    """
    nprocs = args.nprocs
    gisdbase = args.grassdata
    location = args.location
    mapset = args.mapset
    rename = args.rename
    convert = args.convert
    outs = {}
    env = os.environ.copy()
    mset_envs = {}
    mset_rasters = {}
    if nprocs > 1:
        queue = ParallelModuleQueue(nprocs=nprocs)

    for fdir, fil in files:
        base, date = extract_date(fil, datefmt=datefmt)
        # BUG FIX: the original only appended on the "already seen" branch,
        # silently dropping the first date of every base; record them all.
        outs.setdefault(base, []).append(date)
        if mapset_fmt:
            mset_name = date.strftime(mapset_fmt)
            mset_path = os.path.join(gisdbase, location, mset_name)
            if not os.path.exists(mset_path):
                gs.grass_create(gs.GRASSBIN, mset_path, create_opts="")
                try:
                    # per-mapset scratch dir; makedirs creates the
                    # intermediate '.tmp' component as well
                    os.makedirs(os.path.join(mset_path, '.tmp',
                                             socket.gethostname()))
                except OSError:
                    # best-effort: ignore errors creating the temp dir
                    # (e.g. it already exists); was a bare except before
                    pass
            try:
                # reuse the cached session env / raster list for this mapset
                menv = mset_envs[mset_name]
                rasters = mset_rasters[mset_name]
            except KeyError:
                # first file targeting this mapset: open a session and
                # snapshot the rasters already present
                menv = gs.grass_init(gs.GISBASE, gisdbase, location,
                                     mset_name, env=env.copy())
                mset_envs[mset_name] = menv
                mset = Mapset(mset_name, location=location, gisdbase=gisdbase)
                rasters = set(mset.glist("raster"))
                mset_rasters[mset_name] = rasters
        else:
            menv = gs.grass_init(gs.GISBASE, gisdbase, location, mapset,
                                 env=env.copy())
            mset = Mapset(mapset, location=location, gisdbase=gisdbase)
            rasters = set(mset.glist("raster"))
        rast_name = "{ba}_{da}".format(ba=base, da=date.strftime(raster_fmt))
        # skip files whose bands (.1 and .6 — presumably first/last band,
        # TODO confirm) were both imported already
        if rast_name + '.1' not in rasters or rast_name + '.6' not in rasters:
            ifile = os.path.join(fdir, fil)
            mod = Module("r.in.gdal", quiet=True,
                         input=input_fmt.format(input_file=ifile),
                         output=rast_name, run_=False, **kwargs)
            if nprocs > 1:
                # run asynchronously in the per-mapset environment
                mod.env_ = menv
                # NOTE(review): a commented-out sleep hinted at a race when
                # creating mapsets in parallel — watch for that here
                queue.put(mod)
            else:
                mod.run()
                if convert:
                    convert_maps(base, date, log=args.log)
                if rename:
                    rename_maps(base, date, log=args.log)
    if nprocs > 1:
        # block until all queued imports have finished
        queue.wait()
    return outs