Example #1
def run_r_sun(elevation, aspect, slope, day, time, linke, linke_value, albedo,
              albedo_value, beam_rad, diff_rad, refl_rad, glob_rad, incidout,
              suffix, binary, binaryTmpName, flags):
    params = {}
    if linke:
        params.update({'linke': linke})
    if linke_value:
        params.update({'linke_value': linke_value})
    if albedo:
        params.update({'albedo': albedo})
    if albedo_value:
        params.update({'albedo_value': albedo_value})
    if beam_rad:
        params.update({'beam_rad': beam_rad + suffix})
    if diff_rad:
        params.update({'diff_rad': diff_rad + suffix})
    if refl_rad:
        params.update({'refl_rad': refl_rad + suffix})
    if glob_rad:
        params.update({'glob_rad': glob_rad + suffix})
    if incidout:
        params.update({'incidout': incidout + suffix})
    if flags:
        params.update({'flags': flags})

    if is_grass_7():
        grass.run_command('r.sun',
                          elevation=elevation,
                          aspect=aspect,
                          slope=slope,
                          day=day,
                          time=time,
                          overwrite=core.overwrite(),
                          quiet=True,
                          **params)
    else:
        grass.run_command('r.sun',
                          elevin=elevation,
                          aspin=aspect,
                          slopein=slope,
                          day=day,
                          time=time,
                          overwrite=core.overwrite(),
                          quiet=True,
                          **params)
    if binary:
        for output in (beam_rad, diff_rad, refl_rad, glob_rad):
            if not output:
                continue
            exp = '{out} = if({inp} > 0, 1, 0)'.format(out=output + suffix +
                                                       binaryTmpName,
                                                       inp=output + suffix)
            grass.mapcalc(exp=exp, overwrite=core.overwrite())
            grass.run_command(
                'g.rename',
                raster=[output + suffix + binaryTmpName, output + suffix],
                overwrite=True)
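The helper above only adds the optional r.sun parameters that the user actually supplied to a dictionary and forwards them with **params, so the module never receives empty arguments. A minimal, GRASS-independent sketch of that pattern (the run_tool stand-in and the option values are hypothetical):

def run_tool(required, **optional):
    # stand-in for grass.run_command: print what would be passed on
    print('running with', required, optional)

user_options = {'linke_value': 3.0, 'albedo': None, 'beam_rad': 'beam'}
suffix = '_2024'

params = {}
if user_options.get('linke_value'):
    params['linke_value'] = user_options['linke_value']
if user_options.get('albedo'):        # None/empty values are skipped
    params['albedo'] = user_options['albedo']
if user_options.get('beam_rad'):      # output names get the suffix appended
    params['beam_rad'] = user_options['beam_rad'] + suffix

run_tool('elevation_map', **params)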
Example #2
def run_r_sun(elevation, aspect, slope, day, time,
              linke, linke_value, albedo, albedo_value,
              horizon_basename, horizon_step,
              beam_rad, diff_rad, refl_rad, glob_rad,
              incidout, suffix, binary, binaryTmpName):
    params = {}
    if linke:
        params.update({'linke': linke})
    if linke_value:
        params.update({'linke_value': linke_value})
    if albedo:
        params.update({'albedo': albedo})
    if albedo_value:
        params.update({'albedo_value': albedo_value})
    if beam_rad:
        params.update({'beam_rad': beam_rad + suffix})
    if diff_rad:
        params.update({'diff_rad': diff_rad + suffix})
    if refl_rad:
        params.update({'refl_rad': refl_rad + suffix})
    if glob_rad:
        params.update({'glob_rad': glob_rad + suffix})
    if incidout:
        params.update({'incidout': incidout + suffix})
    if horizon_basename and horizon_step:
        params.update({'horizon_basename': horizon_basename})
        params.update({'horizon_step': horizon_step})


    if is_grass_7():
        grass.run_command('r.sun', elevation=elevation, aspect=aspect,
                          slope=slope, day=day, time=time,
                          overwrite=core.overwrite(), quiet=True,
                          **params)
    else:
        grass.run_command('r.sun', elevin=elevation, aspin=aspect,
                          slopein=slope, day=day, time=time,
                          overwrite=core.overwrite(), quiet=True,
                          **params)
    if binary:
        for output in (beam_rad, diff_rad, refl_rad, glob_rad):
            if not output:
                continue
            exp = '{out} = if({inp} > 0, 1, 0)'.format(
                out=output + suffix + binaryTmpName,
                inp=output + suffix)
            grass.mapcalc(exp=exp, overwrite=core.overwrite())
            grass.run_command('g.rename',
                              raster=[output + suffix + binaryTmpName,
                                      output + suffix],
                              overwrite=True)
Example #3
def run_r_shaded_relief(
    elevation_input,
    shades_basename,
    altitude,
    azimuth,
    z_exaggeration,
    scale,
    units,
    suffix,
):
    params = {}
    if units:
        params.update({"units": units})
    grass.run_command(
        "r.relief",
        input=elevation_input,
        output=shades_basename + suffix,
        azimuth=azimuth,
        zscale=z_exaggeration,
        scale=scale,
        altitude=altitude,
        overwrite=core.overwrite(),
        quiet=True,
        **params,
    )
Example #4
def import_files(directory, pattern):
    maps = []
    if pattern:
        from glob import glob
        files = glob('{dir}{sep}{pat}'.format(
            dir=directory, sep=os.path.sep, pat=pattern)
        )
    else:
        files = map(lambda x: os.path.join(directory, x),
                    os.listdir(directory)
        )

    start = time.time()

    import_module = Module('v.in.ascii', separator='space', z=3, flags='tbz',
                           overwrite=overwrite(), quiet=True, run_=False)
    try:
        for f in files:
            basename = os.path.basename(f)
            mapname = os.path.splitext(basename)[0]
            maps.append(mapname)
            message("Importing <{}>...".format(f))
            import_task = deepcopy(import_module)
            queue.put(import_task(input=f, output=mapname))
        queue.wait()
    except CalledModuleError:
        return sys.exit(1)

    if not maps:
        fatal("No input files found")

    message("Import finished in {:.0f} sec".format(time.time() - start))

    return maps
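import_files derives each output map name from its file name by dropping the directory part and the extension before queueing v.in.ascii. The naming step in isolation (file paths are made up):

import os

files = ['/data/points/tile_001.xyz', '/data/points/tile_002.xyz']
maps = []
for f in files:
    basename = os.path.basename(f)            # 'tile_001.xyz'
    mapname = os.path.splitext(basename)[0]   # 'tile_001'
    maps.append(mapname)
print(maps)   # ['tile_001', 'tile_002']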
Example #5
def check_file(filename, map_type, sep):
    with open(filename) as names_file:
        for line in names_file:
            line = line.strip()
            names = line.split(sep)
            if len(names) != 2:
                max_display_chars_for_line = 10
                if len(line) > max_display_chars_for_line:
                    line = line[:max_display_chars_for_line]
                gcore.fatal(
                    _("Cannot parse line <{line}> using separator"
                      " <{sep}> in file <{file}>. Nothing renamed.").format(
                          line=line, sep=sep, file=filename))
            if not map_exists(names[0], type=map_type):
                gcore.fatal(
                    _("Map <{name}> (type <{type}>) does not exist"
                      " in the current Mapset."
                      " Nothing renamed. Note that maps in other Mapsets cannot"
                      " be renamed, however they can be copied.").format(
                          name=names[0], type=map_type))
            if not gcore.overwrite() and map_exists(names[1], type=map_type):
                gcore.fatal(
                    _("Map <{name}> (type <{type}>) already exists."
                      " Nothing renamed."
                      " Use overwrite flag if you want to overwrite"
                      " the existing maps.").format(name=names[0],
                                                    type=map_type))
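check_file expects every line of the rename file to contain exactly two map names, old and new, joined by the chosen separator, and only reports the first characters of an unparsable line. A small sketch of that parsing rule (the sample lines are made up):

sep = ','
lines = ['old_map,new_map', 'broken_line_without_separator']
for line in lines:
    names = line.strip().split(sep)
    if len(names) != 2:
        print('cannot parse line:', line[:10])   # only the first 10 chars are reported
    else:
        print('would rename', names[0], '->', names[1])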
Example #6
def main(opts, flgs):
    ow = overwrite()


    CCEXTR = 'cable_crane_extraction = if(yield>0 && slope>'+opts['slp_min_cc']+' && slope<='+opts['slp_max_cc']+' && extr_dist<'+opts['dist_max_cc']+', 1)'

    FWEXTR = 'forwarder_extraction = if(yield>0 && slope<='+opts['slp_max_fw']+' && management==1 && (roughness==0 || roughness==1 || roughness==99999) && extr_dist<'+opts['dist_max_fw']+', 1)'

    OEXTR = 'other_extraction = if(yield>0 && slope<='+opts['slp_max_cop']+' && management==2 && (roughness==0 || roughness==1 || roughness==99999) && extr_dist<'+opts['dist_max_cop']+', 1)'

    EHF = 'technical_bioenergyHF = technical_surface*(if(management==1 && treatment==1 || management==1 && treatment==99999, yield_pix*'+opts['energy_tops_hf']+', if(management==1 && treatment==2, yield_pix *'+opts['energy_tops_hf']+' + yield_pix * '+opts['energy_cormometric_vol_hf']+')))'

    ECC = 'technical_bioenergyC = technical_surface*(if(management == 2, yield_pix*'+opts['energy_tops_cop']+'))'




    run_command("r.param.scale", overwrite=ow,
                input=opts['dtm'], output="morphometric_features",
                size=3, param="feature")
    run_command("r.slope.aspect", overwrite=ow,
                elevation=opts['dtm'], slope="slope_deg")
    run_command("r.mapcalc", overwrite=ow,
                expression='pix_cross = ((ewres()+nsres())/2)/ cos(slope_deg)')
    run_command("r.null", map="yield_pix1", null=0)
    run_command("r.null", map="lakes", null=0)
    run_command("r.null", map="rivers", null=0)
    run_command("r.null", map="morphometric_features", null=0)
    #morphometric_features==6 -> peaks
    run_command("r.mapcalc", overwrite=ow,
                expression='frict_surf_extr = if(morphometric_features==6, 99999) + if(rivers>=1 || lakes>=1, 99999) + if(yield_pix1<=0, 99999) + pix_cross')
    run_command("r.cost", overwrite=ow,
                input="frict_surf_extr", output="extr_dist",
                stop_points="forest", start_rast="forest_roads",
                max_cost=1500)
    run_command("r.slope.aspect", flags="a", overwrite=ow,
                elevation=opts['dtm'], slope="slope", format="percent")
    run_command("r.mapcalc", overwrite=ow,expression=CCEXTR)
    run_command("r.mapcalc", overwrite=ow,
                expression=FWEXTR)
    run_command("r.mapcalc", overwrite=ow,
                expression=OEXTR)
    run_command("r.null", map="cable_crane_extraction", null=0)
    run_command("r.null", map="forwarder_extraction", null=0)
    run_command("r.null", map="other_extraction", null=0)
    run_command("r.mapcalc", overwrite=ow,
                expression='technical_surface = cable_crane_extraction + forwarder_extraction + other_extraction')
    run_command("r.statistics", overwrite=ow,
                base="compartment", cover="technical_surface", method="sum",
                output="techn_pix_comp")
    run_command("r.mapcalc", overwrite=ow,
                expression='yield_pix2 = yield/(technical_surface*@techn_pix_comp)')
    run_command("r.null", map="yield_pix2", null=0)
    run_command("r.mapcalc", overwrite=ow,
                expression=YPIX % (1 if flgs['u'] else 0, 0 if flgs['u'] else 1,))
    run_command("r.mapcalc", overwrite=ow,expression=EHF)
    run_command("r.mapcalc", overwrite=ow,
                expression=ECC)
    run_command("r.mapcalc", overwrite=ow,
                expression='technical_bioenergy =  (technical_bioenergyHF +  technical_bioenergyC)')
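The r.mapcalc expressions above are assembled by plain string concatenation, which is hard to read and easy to get wrong. As a sketch, the cable-crane expression could equally be built with str.format (the option values below are placeholders):

opts = {'slp_min_cc': '30', 'slp_max_cc': '100', 'dist_max_cc': '800'}

CCEXTR = ('cable_crane_extraction = if(yield>0 && slope>{slp_min_cc}'
          ' && slope<={slp_max_cc} && extr_dist<{dist_max_cc}, 1)'
          ).format(**opts)
print(CCEXTR)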
Example #7
def get_new_points(points, lines, output, maxdist=50):
    skipped = []
    ovwr = gcore.overwrite()
    msgr = get_msgr()
    points, pmset = points.split('@') if '@' in points else (points, '')
    lines, lmset = lines.split('@') if '@' in lines else (lines, '')
    with VectorTopo(points, mapset=pmset, mode='r') as pts:
        cols = pts.table.columns.items() if pts.table else None
        with VectorTopo(lines, mapset=lmset, mode='r') as lns:
            with VectorTopo(output, mode='w', tab_cols=cols,
                            overwrite=ovwr) as out:
                for pnt in pts:
                    line = lns.find['by_point'].geo(pnt, maxdist=maxdist)
                    if line is None:
                        msg = ("Not found any line in the radius of %.2f "
                               "for the point with cat: %d. The point "
                               "will be skipped!")
                        msgr.warning(msg % (maxdist, pnt.cat))
                        skipped.append(pnt.cat)
                        continue
                    # find the new point
                    newpnt, dist, _, _ = line.distance(pnt)
                    # get the attributes
                    attrs = (None
                             if pnt.attrs is None else pnt.attrs.values()[1:])
                    # write the new point in the new vector map
                    out.write(newpnt, attrs)
                # save the changes on the output table
                out.table.conn.commit()
Example #8
def main(opts, flgs):
    TMPVECT = []
    DEBUG = True if flgs['d'] else False
    atexit.register(cleanup, vector=TMPVECT, debug=DEBUG)

    # check input maps
    plant = [
        opts['plant_column_discharge'], opts['plant_column_elevup'],
        opts['plant_column_elevdown'], opts['plant_column_point_id'],
        opts['plant_column_plant_id'], opts['plant_column_power'],
        opts['plant_column_stream_id']
    ]
    ovwr = overwrite()

    try:
        plnt = check_required_columns(opts['plant'], int(opts['plant_layer']),
                                      plant, 'plant')
    except ParameterError as exc:
        exception2error(exc)
        return

    if not opts['output_point']:
        output_point = 'tmp_output_point'
        TMPVECT.append(output_point)
    else:
        output_point = opts['output_point']

    plnt = conv_segpoints(opts['plant'], output_point)

    el, mset = (opts['elevation'].split('@') if '@' in opts['elevation'] else
                (opts['elevation'], ''))

    elev = RasterRow(name=el, mapset=mset)
    elev.open('r')
    plnt.open('r')

    plants, skipped = read_plants(plnt,
                                  elev=elev,
                                  restitution='restitution',
                                  intake='intake',
                                  ckind_label='kind_label',
                                  cdischarge='discharge',
                                  celevation='elevation',
                                  cid_point='cat',
                                  cid_plant='plant_id')

    plnt.close()

    # contour options
    resolution = float(opts['resolution']) if opts['resolution'] else None
    write_structures(plants,
                     opts['output_struct'],
                     elev,
                     ndigits=int(opts['ndigits']),
                     resolution=resolution,
                     contour=opts['contour'],
                     overwrite=ovwr)
    elev.close()
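Several of these examples split a fully qualified map name of the form name@mapset into its two parts, falling back to an empty mapset when no '@' is present. The idiom on its own:

def split_mapname(fullname):
    # 'elev@PERMANENT' -> ('elev', 'PERMANENT'); 'elev' -> ('elev', '')
    return tuple(fullname.split('@')) if '@' in fullname else (fullname, '')

print(split_mapname('elev@PERMANENT'))
print(split_mapname('elev'))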
Example #9
def main():
    input = options['input']
    layer = options['layer']
    format = options['format']
    dsn = options['dsn']
    table = options['table']

    if format.lower() == 'dbf':
        format = "ESRI_Shapefile"

    if format.lower() == 'csv':
        olayer = basename(dsn, 'csv')
    else:
        olayer = None

    #is there a simpler way of testing for --overwrite?
    dbffile = input + '.dbf'
    if os.path.exists(dbffile) and not grass.overwrite():
        grass.fatal(_("File <%s> already exists") % dbffile)

    if olayer:
        if grass.run_command('v.out.ogr',
                             quiet=True,
                             input=input,
                             layer=layer,
                             dsn=dsn,
                             format=format,
                             type='point,line,area',
                             olayer=olayer) != 0:
            sys.exit(1)
    else:
        if grass.run_command('v.out.ogr',
                             quiet=True,
                             input=input,
                             layer=layer,
                             dsn=dsn,
                             format=format,
                             type='point,line,area') != 0:
            sys.exit(1)

    if format == "ESRI_Shapefile":
        exts = ['shp', 'shx', 'prj']
        if dsn.endswith('.dbf'):
            outname = basename(dsn, 'dbf')
            for ext in exts:
                try_remove("%s.%s" % (outname, ext))
            outname += '.dbf'
        else:
            for ext in exts:
                try_remove(os.path.join(dsn, "%s.%s" % (input, ext)))
            outname = os.path.join(dsn, input + ".dbf")
    elif format.lower() == 'csv':
        outname = dsn + '.csv'
    else:
        outname = input

    grass.message(_("Exported table <%s>") % outname)
Example #10
def main():
    input = options['input']
    layer = options['layer']
    format = options['format']
    output = options['output']
    table = options['table']

    if format.lower() == 'dbf':
        format = "ESRI_Shapefile"

    if format.lower() == 'csv':
        olayer = basename(output, 'csv')
    else:
        olayer = None

    # is there a simpler way of testing for --overwrite?
    dbffile = input + '.dbf'
    if os.path.exists(dbffile) and not grass.overwrite():
        grass.fatal(_("File <%s> already exists") % dbffile)

    if olayer:
        try:
            grass.run_command('v.out.ogr', quiet=True, input=input, layer=layer,
                              output=output,
                              format=format, type='point,line,area',
                              olayer=olayer)
        except CalledModuleError:
            grass.fatal(_("Module <%s> failed") % 'v.out.ogr')

    else:
        try:
            grass.run_command('v.out.ogr', quiet=True, input=input,
                              layer=layer, output=output,
                              format=format, type='point,line,area')
        except CalledModuleError:
            grass.fatal(_("Module <%s> failed") % 'v.out.ogr')

    if format == "ESRI_Shapefile":
	exts = ['shp', 'shx', 'prj']
	if output.endswith('.dbf'):
	    outname = basename(output, 'dbf')
	    for ext in exts:
		try_remove("%s.%s" % (outname, ext))
	    outname += '.dbf'
	else:
	    for ext in exts:
		try_remove(os.path.join(output, "%s.%s" % (input, ext)))
	    outname = os.path.join(output, input + ".dbf")
    elif format.lower() == 'csv':
	outname = output + '.csv'
    else:
	outname = input

    grass.message(_("Exported table <%s>") % outname)
Example #11
def main():
    input = options['input']
    layer = options['layer']
    format = options['format']
    output = options['output']
    table = options['table']

    if format.lower() == 'dbf':
        format = "ESRI_Shapefile"

    if format.lower() == 'csv':
        olayer = basename(output, 'csv')
    else:
        olayer = None

    # is there a simpler way of testing for --overwrite?
    dbffile = input + '.dbf'
    if os.path.exists(dbffile) and not gcore.overwrite():
        gcore.fatal(_("File <%s> already exists") % dbffile)

    if olayer:
        try:
            gcore.run_command('v.out.ogr', quiet=True, input=input,
                              layer=layer, output=output, format=format,
                              type='point,line,area', olayer=olayer)
        except CalledModuleError:
            gcore.fatal(_("Module <%s> failed") % 'v.out.ogr')

    else:
        try:
            gcore.run_command('v.out.ogr', quiet=True, input=input,
                              layer=layer, output=output,
                              format=format, type='point,line,area')
        except CalledModuleError:
            gcore.fatal(_("Module <%s> failed") % 'v.out.ogr')

    if format == "ESRI_Shapefile":
        exts = ['shp', 'shx', 'prj']
        if output.endswith('.dbf'):
            outname = basename(output, 'dbf')
            for ext in exts:
                try_remove("%s.%s" % (outname, ext))
            outname += '.dbf'
        else:
            for ext in exts:
                try_remove(os.path.join(output, "%s.%s" % (input, ext)))
            outname = os.path.join(output, input + ".dbf")
    elif format.lower() == 'csv':
        outname = output + '.csv'
    else:
        outname = input

    gcore.message(_("Exported table <%s>") % outname)
Example #12
def run_r_sun(elevation, aspect, slope, latitude, longitude, linke,
              linke_value, albedo, albedo_value, horizon_basename,
              horizon_step, solar_constant, day, step, beam_rad, diff_rad,
              refl_rad, glob_rad, insol_time, suffix, flags):
    '''
    Execute r.sun with the provided input options. Beyond the required
    parameters, the function builds a dictionary of the optional/selected
    parameters according to the user-requested inputs and outputs.
    Optional inputs:

    - latitude
    - longitude
    - linke  OR  single linke value
    - albedo  OR  single albedo value
    - horizon maps
    '''
    params = {}
    if beam_rad:
        params.update({'beam_rad': beam_rad + suffix})
    if diff_rad:
        params.update({'diff_rad': diff_rad + suffix})
    if refl_rad:
        params.update({'refl_rad': refl_rad + suffix})
    if glob_rad:
        params.update({'glob_rad': glob_rad + suffix})
    if insol_time:
        params.update({'insol_time': insol_time + suffix})
    if linke:
        params.update({'linke': linke})
    if linke_value:
        params.update({'linke_value': linke_value})
    if albedo:
        params.update({'albedo': albedo})
    if albedo_value:
        params.update({'albedo_value': albedo_value})
    if horizon_basename and horizon_step:
        params.update({'horizon_basename': horizon_basename})
        params.update({'horizon_step': horizon_step})
    if solar_constant is not None:
        params.update({'solar_constant': solar_constant})

    if flags:
        params.update({'flags': flags})

    grass.run_command('r.sun',
                      elevation=elevation,
                      aspect=aspect,
                      slope=slope,
                      day=day,
                      step=step,
                      overwrite=core.overwrite(),
                      quiet=True,
                      **params)
Example #13
def main():
    options, flags = gcore.parser()
    print(options, flags)
    gisenv = gcore.gisenv()
    if 'MONITOR' in gisenv:
        cmd_file = gisenv['MONITOR_{monitor}_CMDFILE'.format(monitor=gisenv['MONITOR'].upper())]
        d_cmd = 'd.to.rast'
        for param, val in options.items():
            if val:
                d_cmd += " {param}={val}".format(param=param, val=val)
        if gcore.overwrite():
            d_cmd += ' --overwrite'
        with open(cmd_file, "a") as file_:
            file_.write(d_cmd)
    else:
        gcore.fatal(_("No graphics device selected. Use d.mon to select graphics device."))
Example #14
def main():
    options, flags = gcore.parser()
    gisenv = gcore.gisenv()
    if 'MONITOR' in gisenv:
        cmd_file = gcore.parse_command('d.mon', flags='g')['cmd']
        d_cmd = 'd.to.rast'
        for param, val in options.items():
            if val:
                d_cmd += " {param}={val}".format(param=param, val=val)
        if gcore.overwrite():
            d_cmd += ' --overwrite'
        with open(cmd_file, "a") as file_:
            file_.write(d_cmd)
    else:
        gcore.fatal(
            _("No graphics device selected. Use d.mon to select graphics device."
              ))
Example #15
def main():
    options, flags = gcore.parser()
    gisenv = gcore.gisenv()
    if "MONITOR" in gisenv:
        cmd_file = gcore.parse_command("d.mon", flags="g")["cmd"]
        d_cmd = "d.to.rast"
        for param, val in options.items():
            if val:
                d_cmd += " {param}={val}".format(param=param, val=val)
        if gcore.overwrite():
            d_cmd += " --overwrite"
        with open(cmd_file, "a") as file_:
            file_.write(d_cmd)
    else:
        gcore.fatal(
            _("No graphics device selected. Use d.mon to select graphics device.")
        )
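Examples #13 to #15 (and #18 below) all rebuild the d.to.rast call as a plain command string and append it to the monitor's command file. A standalone sketch of that string assembly (the options dictionary is hypothetical):

options = {'input': 'mymap', 'output': 'rendered', 'width': ''}
overwrite = True

d_cmd = 'd.to.rast'
for param, val in options.items():
    if val:                                  # empty options are skipped
        d_cmd += ' {param}={val}'.format(param=param, val=val)
if overwrite:
    d_cmd += ' --overwrite'
print(d_cmd)   # e.g. 'd.to.rast input=mymap output=rendered --overwrite'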
Example #16
def main():
    input = options['input']
    format = options['format']
    dsn = options['dsn']
    table = options['table']

    if format.lower() == 'dbf':
        format = "ESRI_Shapefile"

    if format.lower() == 'csv':
        olayer = grass.basename(dsn, 'csv')
    else:
        olayer = None

    # is there a simpler way of testing for --overwrite?
    dbffile = input + '.dbf'
    if os.path.exists(dbffile) and not grass.overwrite():
        grass.fatal(_("File <%s> already exists") % dbffile)

    if olayer:
        if grass.run_command('v.out.ogr', quiet=True, input=input, dsn=dsn,
                             format=format, type='point', olayer=olayer) != 0:
            sys.exit(1)
    else:
        if grass.run_command('v.out.ogr', quiet=True, input=input, dsn=dsn,
                             format=format, type='point') != 0:
            sys.exit(1)

    if format == "ESRI_Shapefile":
	exts = ['shp', 'shx', 'prj']
	if dsn.endswith('.dbf'):
	    outname = grass.basename(dsn, 'dbf')
	    for ext in exts:
		grass.try_remove("%s.%s" % (outname, ext))
	    outname += '.dbf'
	else:
	    for ext in exts:
		grass.try_remove(os.path.join(dsn, "%s.%s" % (input, ext)))
	    outname = os.path.join(dsn, input + ".dbf")
    elif format.lower() == 'csv':
	outname = dsn + '.csv'
    else:
	outname = input

    grass.message(_("Exported table <%s>") % outname)
Example #17
    def p_statement_assign(self, t):
        """
        statement : NAME EQUALS expression
                  | NAME EQUALS name
                  | NAME EQUALS paren_name
        """
        # We remove the invalid vector name from the list
        if t[3] in self.names:
            self.names.pop(t[3])

        # We rename the resulting vector map
        if self.debug:
            print("g.rename vector=%s,%s" % (t[3], t[1]))

        if self.run:
            m = mod.Module('g.rename', vector=(t[3], t[1]),
                           overwrite=grass.overwrite(), run_=False)
            self.cmdlist.add_cmd(m)
        self.remove_intermediate_vector_maps()
Example #18
def main():
    options, flags = gcore.parser()
    print(options, flags)
    gisenv = gcore.gisenv()
    if 'MONITOR' in gisenv:
        cmd_file = gisenv['MONITOR_{monitor}_CMDFILE'.format(
            monitor=gisenv['MONITOR'].upper())]
        d_cmd = 'd.to.rast'
        for param, val in options.items():
            if val:
                d_cmd += " {param}={val}".format(param=param, val=val)
        if gcore.overwrite():
            d_cmd += ' --overwrite'
        with open(cmd_file, "a") as file_:
            file_.write(d_cmd)
    else:
        gcore.fatal(
            _("No graphics device selected. Use d.mon to select graphics device."
              ))
Example #19
def main():
    magnitude = options['magnitude']
    direction = options['direction']
    divergence = options['output']
    global TMP, CLEANUP

    tmp_name = 'tmp_divergence_' + str(os.getpid())
    qsx = tmp_name + "qsx"
    qsy = tmp_name + "qsy"
    qsx_dx = tmp_name + "qsx_dx"
    qsy_dy = tmp_name + "qsy_dy"
    TMP.extend([qsx, qsy, qsx_dx, qsy_dy])

    # checks if there are already some maps
    old_maps = temp_maps_exist()
    if old_maps:
        if not gcore.overwrite():
            CLEANUP = False
            gcore.fatal(
                _("You have to first check the overwrite flag or remove"
                  " the following maps:\n"
                  "{names}").format(names=','.join(old_maps)))
        else:
            gcore.warning(
                _("The following maps will be overwritten: {names}").format(
                    names=','.join(old_maps)))
    try:
        grast.mapcalc(exp="{qsx}={mag} * cos({direct})".format(
            qsx=qsx, mag=magnitude, direct=direction))
        grast.mapcalc(exp="{qsy}={mag} * sin({direct})".format(
            qsy=qsy, mag=magnitude, direct=direction))
        gcore.run_command('r.slope.aspect', elevation=qsx, dx=qsx_dx)
        gcore.run_command('r.slope.aspect', elevation=qsy, dy=qsy_dy)
        grast.mapcalc(exp="{div}={qsx_dx} + {qsy_dy}".format(
            div=divergence, qsx_dx=qsx_dx, qsy_dy=qsy_dy))
    except CalledModuleError:
        gcore.fatal(
            _("r.divergence failed, check errors above. Please report this problem to developers."
              ))
        return 1

    grast.raster_history(divergence)
    return 0
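The main() above computes the divergence as d(qsx)/dx + d(qsy)/dy after splitting magnitude and direction into x and y components. A NumPy illustration of the same mathematics on an in-memory grid, not GRASS code:

import numpy as np

# synthetic magnitude and direction (radians) fields on a regular grid
y, x = np.mgrid[0:50, 0:50]
magnitude = np.hypot(x - 25, y - 25)
direction = np.arctan2(y - 25, x - 25)

qsx = magnitude * np.cos(direction)
qsy = magnitude * np.sin(direction)

# divergence = d(qsx)/dx + d(qsy)/dy, approximated with finite differences
dqsx_dx = np.gradient(qsx, axis=1)
dqsy_dy = np.gradient(qsy, axis=0)
divergence = dqsx_dx + dqsy_dy
print(divergence.mean())   # 2 for this radial field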
Example #20
def make_new_table(vct, tname, cols=COLS, force=None):
    """Check/remove/create a new table"""
    msgr = get_msgr()
    force = overwrite() if force is None else force
    create_link = True
    # make a new table
    table = Table(tname, vct.table.conn)
    if table.exist():
        if any([table.name == l.table_name for l in vct.dblinks]):
            create_link = False
        msg = _("Table <%s> already exist and will be removed.")
        msgr.warning(msg % table.name)
        table.drop(force=force)
    table.create(cols)
    # fill the new table with the segment cats
    slct = vct.table.filters.select(vct.table.key)
    cur = vct.table.execute(slct.get_sql())
    table.insert(((cat[0], None) for cat in cur), many=True)
    table.conn.commit()
    return table, create_link
Example #21
def create_dmt_tiles(maps, res, rst_nprocs, offset_multiplier=10):
    offset=res * offset_multiplier

    start = time.time()

    region_module = Module('g.region', n='n+{}'.format(offset), s='s-{}'.format(offset),
                           e='e+{}'.format(offset), w='w-{}'.format(offset),
                           quiet=True)
    rst_module = Module('v.surf.rst', nprocs=rst_nprocs,
                        overwrite=overwrite(), quiet=True, run_=False)
    try:
        for mapname in maps:
            message("Interpolating <{}>...".format(mapname))
            region_task = deepcopy(region_module)
            rst_task = deepcopy(rst_module)
            mm = MultiModule([region_task(vector=mapname),
                              rst_task(input=mapname, elevation=mapname)],
                              sync=False, set_temp_region=True)
            queue.put(mm)
        queue.wait()
    except CalledModuleError:
        return sys.exit(1)

    message("Interpolation finished in {:.0f} min".format((time.time() - start) / 60.))
Example #22
def main(opts, flgs):
    pid = os.getpid()
    pat = "tmprgreen_%i_*" % pid
    atexit.register(cleanup,
                    pattern=pat,
                    debug=False)
    # check or generate raster map from rules files

    ecovalues = ['landvalue', 'tributes', 'stumpage', 'rotation', 'age',
                 'min_exc', 'max_exc']
    (lan, tri, stu, rot, age,
     excmin, excmax) = check_raster_or_landuse(opts, ecovalues)
    upper = opts['upper'] if opts['upper'] else ('tmprgreen_%i_upper' % pid)
    comp = opts['compensation'] if opts['compensation'] else ('tmprgreen_%i_compensation' % pid)
    exc = opts['excavation'] if opts['excavation'] else ('tmprgreen_%i_excavation' % pid)
    vlayer = int(opts['struct_layer'])

    plant, mset = (opts['plant'].split('@') if '@' in opts['plant'] else (opts['plant'], ''))

    struct, mset = (opts['struct'].split('@') if '@' in opts['struct'] else (opts['struct'], ''))

    # read common scalar parameters
    irate = float(opts['interest_rate'])
    life = float(opts['life'])
    width = float(opts['width'])
    depth = float(opts['depth'])
    slim = float(opts['slope_limit'])
    overw = overwrite()

    # RASTERS
    # Start computing the raster map of the costs
    # Upper value
    upper_value(upper, stu, lan, rot, age, irate, overw)
    # Compensation raster costs
    compensation_cost(comp, lan, tri, upper,
                      irate, float(opts['gamma_comp']), life, width, overw)
    # Excavation raster costs
    excavation_cost(exc, excmin, excmax, opts['slope'],
                    slim, width, depth, overw)
    # TODO: extra costs for crossing roads and rivers are still missing

    # VECTOR
    # add columns with costs from rasters
    # add compensation costs
    v.rast_stats(map=struct, layer=vlayer, flags='c',
                 raster=comp, column_prefix='comp_cost', method='sum')
    # add excavation costs
    v.rast_stats(map=struct, layer=vlayer, flags='c',
                 raster=exc, column_prefix='exc_cost', method='sum')

    # add electro-mechanical costs
    electromechanical_cost(struct,
                           power=opts['struct_column_power'],
                           head=opts['struct_column_head'],
                           gamma=float(opts['gamma_em']),
                           alpha=float(opts['alpha_em']),
                           beta=float(opts['beta_em']),
                           const=float(opts['const_em']),
                           vlayer=vlayer, cname='em_cost',
                           ctype='double precision',
                           overwrite=overw)

    # add linear cost for pipeline
    linear_cost(vname=struct, cname='lin_pipe_cost',
                alpha=float(opts['lc_pipe']), vlayer=vlayer,
                ctype='double precision',  overwrite=overw)

    # add linear cost for the electro line
    get_electro_length(opts)
    linear_cost(vname=struct, cname='lin_electro_cost',
                alpha=float(opts['lc_electro']), length='electro_length',
                vlayer=vlayer, ctype='double precision',  overwrite=overw)
    # Compensation raster costs for electroline
    comp = (opts['compensation']+'el' if
            opts['compensation']
            else ('tmprgreen_%i_compensation_el' % pid))
    exc = (opts['excavation']+'el'
           if opts['excavation']
           else ('tmprgreen_%i_excavation_el' % pid))
    compensation_cost(comp, lan, tri, upper,
                      irate, float(opts['gamma_comp']), life, 0.6, overw)
    # Excavation raster costs for electroline
    excavation_cost(exc, excmin, excmax, opts['slope'],
                    slim, 0.6, 0.6, overw)

    # add excavation cost and compensation cost for electroline
    elines = (opts['elines'] if opts['elines']
              else ('tmprgreen_%i_elines' % pid))
    v.rast_stats(map=elines, layer=vlayer, flags='c',
                 raster=comp, column_prefix='comp_cost', method='sum')
    # add excavation costs
    v.rast_stats(map=elines, layer=vlayer, flags='c',
                 raster=exc, column_prefix='exc_cost', method='sum')
    write2struct(elines, opts)

    xcost = "{cname} = {alpha} * {em}"
    # add power station costs
    vcolcalc(vname=struct, vlayer=vlayer,
             ctype='double precision',
             expr=xcost.format(cname='station_cost', em='em_cost',
                               alpha=opts['alpha_station']))
    # add inlet costs
    vcolcalc(vname=struct, vlayer=vlayer,
             ctype='double precision', notfinitesubstitute=0.,
             expr=xcost.format(cname='inlet_cost', em='em_cost',
                               alpha=opts['alpha_inlet']))
    # add total inlet costs
    # TODO: to avoid counting this cost more than once it has been multiplied by 0.5
    tot = ('tot_cost = (comp_cost_sum + em_cost + el_comp_exc +'
           'lin_pipe_cost + lin_electro_cost + '
           'station_cost + inlet_cost + {grid}*0.5) * '
           '(1 + {general} + {hindrances})')
    vcolcalc(vname=struct, vlayer=vlayer,
             ctype='double precision', notfinitesubstitute=0.,
             expr=tot.format(grid=opts['grid'], general=opts['general'],
                             hindrances=opts['hindrances']))

    # TODO: produce a new vector map (output) with the conduit + penstock as
    # a single line and sum the costs grouped by intake_id and side
    # SELECT {key} FROM {tname}
    #FIXME: intake_id and discharge can have different names
    group_by(struct, opts['output_struct'],
             isolate=['intake_id', opts['struct_column_id'],
                      opts['struct_column_side'], opts['struct_column_power'],
                      opts['struct_column_head'], 'discharge'],
             aggregate=['tot_cost', ],
             function='sum',
             group_by=['intake_id', opts['struct_column_side']])

    """
    Where do these values (3871.2256 and -0.45) come from?
    import numpy as np
    from scipy import stats

    power= np.array([50., 100., 200., 400., 600., 1000., 5000.])
    maint = np.array([707., 443., 389., 261., 209., 163., 88.])

    plt.plot(np.log(power), np.log(maint))
    plt.show()
    slope, intercept, r_value, p_value, std_err = stats.linregress(np.log(power), np.log(maint))
    #slope -0.45000431409701719
    #intercept 8.2613264284076049
    np.exp(intercept) * power ** slope

    """
#    maint = "{cname} = {alpha} * {power} ** (1 + {beta}) + {const}"
    maint = "{cname} = {cost_per_kW} * {power} * {alpha} + {const}"
    # compute yearly maintenance costs
    vcolcalc(vname=opts['output_struct'], vlayer=vlayer,
             ctype='double precision', notfinitesubstitute=0.,
             expr=maint.format(cname='maintenance',
                               cost_per_kW=opts['cost_maintenance_per_kw'],
                               alpha=opts['alpha_maintenance'],
                               power=opts['struct_column_power'],
                               beta=opts['beta_maintenance'],
                               const=opts['const_maintenance']))

    # compute yearly revenues
    rev = "{cname} = {eta} * {power} * {eprice} * {ophours}  + {const}"
    vcolcalc(vname=opts['output_struct'], vlayer=vlayer,
             ctype='double precision', notfinitesubstitute=0.,
             expr=rev.format(cname='revenue',
                             eta=opts['eta'],
                             power=opts['struct_column_power'],
                             eprice=opts['energy_price'],
                             ophours=opts['operative_hours'],
                             const=opts['const_revenue']))

    # compute the Net Present Value
    npv = "{cname} = {gamma} * ({revenue} - {maintenance}) - {tot}"
    gamma_npv = get_gamma_NPV(irate, life)
    vcolcalc(vname=opts['output_struct'], vlayer=vlayer,
             ctype='double precision', notfinitesubstitute=0.,
             expr=npv.format(cname='NPV',
                             gamma=gamma_npv,
                             revenue='revenue',
                             maintenance='maintenance',
                             tot='tot_cost'))

    economic2segment(economic=opts['output_struct'], segment=plant,
                         basename=opts['plant_basename'],
                         eco_layer=1, seg_layer=int(opts['plant_layer']),
                         eco_pid=opts['struct_column_id'],
                         seg_pid=opts['plant_column_id'],
                         function=max_NPV,
                         exclude=['intake_id', 'side', 'power',
                                  'gross_head', 'discharge'])

    vec = VectorTopo(opts['output_struct'])
    vec.open('rw')
    vec.table.columns.add('max_NPV', 'VARCHAR(3)')

    list_intakeid = list(set(vec.table.execute(
        'SELECT intake_id FROM %s' % vec.table.name).fetchall()))

    for i in range(len(list_intakeid)):
        vec.rewind()
        list_npv = list(vec.table.execute(
            'SELECT NPV FROM %s WHERE intake_id=%i;'
            % (vec.table.name, list_intakeid[i][0])).fetchall())
        npvmax = max(list_npv)[0]
        for line in vec:
            if line.attrs['intake_id'] == list_intakeid[i][0]:
                if line.attrs['NPV'] == npvmax:
                    line.attrs['max_NPV'] = 'yes'
                else:
                    line.attrs['max_NPV'] = 'no'

    vec.table.conn.commit()
    vec.close()
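The NPV expression multiplies the yearly net revenue by a discount factor obtained from get_gamma_NPV(irate, life). Assuming the conventional present-value annuity factor (the real helper may differ), the calculation looks roughly like this:

def gamma_npv(interest_rate, life_years):
    # present value of 1 unit per year over life_years at the given rate
    # (assumed formula; the actual get_gamma_NPV may differ)
    return (1 - (1 + interest_rate) ** -life_years) / interest_rate

revenue, maintenance, tot_cost = 120000.0, 15000.0, 900000.0
npv = gamma_npv(0.03, 30) * (revenue - maintenance) - tot_cost
print(round(npv, 2))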
Example #23
def main():
    input = options["input"]
    output = options["output"]
    fs = options["fs"]
    proj_in = options["proj_input"]
    proj_out = options["proj_output"]
    ll_in = flags["i"]
    ll_out = flags["o"]
    decimal = flags["d"]
    copy_input = flags["e"]
    include_header = flags["c"]

    #### check for cs2cs
    if not grass.find_program("cs2cs"):
        grass.fatal(_("cs2cs program not found, install PROJ.4 first: http://proj.maptools.org"))

    #### check for overenthusiasm
    if proj_in and ll_in:
        grass.fatal(_("Choose only one input parameter method"))

    if proj_out and ll_out:
        grass.fatal(_("Choose only one output parameter method"))

    if ll_in and ll_out:
        grass.fatal(_("Choise only one auto-projection parameter method"))

    if output and not grass.overwrite() and os.path.exists(output):
        grass.fatal(_("Output file already exists"))

    #### parse field separator
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ",":
        ifs = ofs = ","
    else:
        try:
            ifs, ofs = fs.split(",")
        except ValueError:
            ifs = ofs = fs

    ifs = ifs.lower()
    ofs = ofs.lower()

    if ifs in ("space", "tab"):
        ifs = " "
    elif ifs == "comma":
        ifs = ","
    else:
        if len(ifs) > 1:
            grass.warning(_("Invalid field separator, using '%s'") % ifs[0])
        try:
            ifs = ifs[0]
        except IndexError:
            grass.fatal(_("Invalid field separator '%s'") % ifs)

    if ofs.lower() == "space":
        ofs = " "
    elif ofs.lower() == "tab":
        ofs = "\t"
    elif ofs.lower() == "comma":
        ofs = ","
    else:
        if len(ofs) > 1:
            grass.warning(_("Invalid field separator, using '%s'") % ofs[0])
        try:
            ofs = ofs[0]
        except IndexError:
            grass.fatal(_("Invalid field separator '%s'") % ifs)

    #### set up projection params
    s = grass.read_command("g.proj", flags="j")
    kv = grass.parse_key_val(s)
    if "XY location" in kv["+proj"] and (ll_in or ll_out):
        grass.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        grass.verbose("Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = grass.read_command("g.proj", flags="jf")

    if proj_in:
        in_proj = proj_in

    if not in_proj:
        grass.verbose("Assuming current location as input")
        in_proj = grass.read_command("g.proj", flags="jf")

    in_proj = in_proj.strip()
    grass.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        grass.verbose("Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
        out_proj = grass.read_command("g.proj", flags="jf")

    if proj_out:
        out_proj = proj_out

    if not out_proj:
        grass.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    grass.verbose("Output parameters: '%s'" % out_proj)

    #### set up input file
    if input == "-":
        infile = None
        inf = sys.stdin
    else:
        infile = input
        if not os.path.exists(infile):
            grass.fatal(_("Unable to read input data"))
        inf = open(infile)
        grass.debug("input file=[%s]" % infile)

    #### set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, "w")
        grass.debug("output file=[%s]" % outfile)

    #### set up output style
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    #### do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'

    cmd = ["cs2cs"] + copyinp + outfmt + in_proj.split() + ["+to"] + out_proj.split()
    p = grass.Popen(cmd, stdin=grass.PIPE, stdout=grass.PIPE)

    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            xy, z = line.split(" ", 1)
            x, y = xy.split("\t")
            outf.write("%s%s%s%s%s\n" % (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = line.split("\t")
            inX, inY = inXYZ.split(" ")[:2]
            y, z = rest.split(" ", 1)
            outf.write(
                "%s%s%s%s%s%s%s%s%s\n" % (inX.strip(), ofs, inY.strip(), ofs, x.strip(), ofs, y.strip(), ofs, z.strip())
            )

    p.wait()

    if p.returncode != 0:
        grass.warning(_("Projection transform probably failed, please investigate"))
Example #24
def main(opts, flgs):
    ow = overwrite()

    output = opts["output_prefix"]
    management = opts["management"]
    treatment = opts["treatment"]

    # output variables
    rec_bioenergyHF = output + "_rec_bioenergyHF"
    rec_bioenergyC = output + "_rec_bioenergyC"
    rec_bioenergy = output + "_rec_bioenergy"

    # input variables
    pot_HF = opts["hfmap"]
    pot_C = opts["cmap"]

    rivers = opts["hydro"]

    buffer_hydro = int(opts["buffer_hydro"])

    zone_less = opts["zone_less"]

    check_var = 0

    # pdb.set_trace()

    # check that the rivers raster map is given when a buffer is requested
    if buffer_hydro > 0 and rivers == "":
        print(
            "if the river buffer is greater than zero, the raster map of rivers is required"
        )
        return

    # retrieve the list of raster constraint maps
    restr_map = opts["restrictions"].split(",")

    # start the query string
    expr_map = "constraint="

    # check if at least 1 constraint map is inserted
    if opts["restrictions"] != "":
        check_var = 1
        count_map = 0

        # loop over the constraint maps, composing the query string
        for mapr_ in restr_map:
            mapr1 = mapr_.split("@")
            mapr = mapr1[0]

            if count_map == 0:
                expr_map += "(" + mapr
                convert_bool = mapr + "=" + mapr + ">0"
                run_command("r.mapcalc", overwrite=1, expression=convert_bool)
                run_command("r.null", map=mapr, null=0)
            else:
                expr_map += "||" + mapr
                convert_bool = mapr + "=" + mapr + ">0"
                run_command("r.mapcalc", overwrite=1, expression=convert_bool)
                run_command("r.null", map=mapr, null=0)
            count_map += 1
        expr_map += ")"

    # if a river buffer is requested, calculate the buffer and
    # add it to the constraint map
    if buffer_hydro > 0:
        run_command("r.null", map=rivers, null=0)
        run_command(
            "r.buffer",
            overwrite=ow,
            input=rivers,
            output="rivers_buffer",
            distances=buffer_hydro,
            flags="z",
        )
        run_command("r.null", map="rivers_buffer", null=0)
        if check_var == 1:
            expr_map += "|| (rivers || rivers_buffer)"
        else:
            expr_map += "rivers || rivers_buffer"
        check_var = 1

    if zone_less != "":
        check_var = 1

    if check_var == 0:
        print("Error: At least one constraint map must be inserted")
        return
    else:
        # if at least one constraint is given, process the potential maps
        run_command(
            "r.mapcalc", overwrite=ow, expression=rec_bioenergyHF + "=" + pot_HF
        )
        run_command("r.mapcalc", overwrite=ow, expression=rec_bioenergyC + "=" + pot_C)

        run_command("r.mapcalc", overwrite=ow, expression=expr_map)
        run_command("r.mapcalc", overwrite=ow, expression="constraint=constraint<1")
        run_command("r.null", map="constraint", null=0)

        constr_HF = rec_bioenergyHF + "=" + rec_bioenergyHF + "*constraint"
        constr_C = rec_bioenergyC + "=" + rec_bioenergyC + "*constraint"

        run_command("r.mapcalc", overwrite=ow, expression=constr_HF)
        run_command("r.mapcalc", overwrite=ow, expression=constr_C)

    if zone_less != "":
        run_command(
            "r.mapcalc",
            overwrite=ow,
            expression="wood_pix=(" + zone_less + "/((ewres()*nsres())*10000))",
        )
        WHF = (
            "wood_energyHF= if("
            + management
            + "==1 && "
            + treatment
            + "==1 || "
            + management
            + " == 1 && "
            + treatment
            + "==99999, wood_pix*%f, if("
            + management
            + "==1 && "
            + treatment
            + "==2, wood_pix*%f + wood_pix*%f))"
        )
        WCC = (
            "wood_energyC = if("
            + management
            + "==2, wood_pix*"
            + opts["energy_tops_cop"]
            + ")"
        )
        run_command(
            "r.mapcalc",
            overwrite=ow,
            expression=WHF
            % tuple(
                map(
                    float,
                    (
                        opts["energy_tops_hf"],
                        opts["energy_tops_hf"],
                        opts["energy_cormometric_vol_hf"],
                    ),
                )
            ),
        )
        run_command("r.mapcalc", overwrite=ow, expression=WCC)

        run_command("r.null", map="wood_energyHF", null=0)
        run_command("r.null", map="wood_energyC", null=0)

        limit_HF = rec_bioenergyHF + "=" + rec_bioenergyHF + "-wood_energyHF"
        limit_C = rec_bioenergyC + "=" + rec_bioenergyC + "-wood_energyC"

        run_command("r.mapcalc", overwrite=ow, expression=limit_HF)
        run_command("r.mapcalc", overwrite=ow, expression=limit_C)

    RECOT = rec_bioenergy + " = (" + rec_bioenergyHF + " + " + rec_bioenergyC + ")"

    run_command("r.mapcalc", overwrite=ow, expression=RECOT)

    with RasterRow(rec_bioenergy) as pT:
        T = np.array(pT)

    print(
        "Resulted maps: "
        + output
        + "_rec_bioenergyHF, "
        + output
        + "_rec_bioenergyC, "
        + output
        + "_rec_bioenergy"
    )
    print("Total bioenergy stimated (Mwh): %.2f" % np.nansum(T))
Example #25
def main():
    coords = options['coordinates']
    input = options['input']
    output = options['output']
    fs = options['separator']
    proj_in = options['proj_in']
    proj_out = options['proj_out']
    ll_in = flags['i']
    ll_out = flags['o']
    decimal = flags['d']
    copy_input = flags['e']
    include_header = flags['c']

    # check for cs2cs
    if not gcore.find_program('cs2cs'):
        gcore.fatal(
            _("cs2cs program not found, install PROJ.4 first: \
            http://proj.maptools.org"))

    # check for overenthusiasm
    if proj_in and ll_in:
        gcore.fatal(_("Choose only one input parameter method"))

    if proj_out and ll_out:
        gcore.fatal(_("Choose only one output parameter method"))

    if ll_in and ll_out:
        gcore.fatal(_("Choose only one auto-projection parameter method"))

    if output and not gcore.overwrite() and os.path.exists(output):
        gcore.fatal(_("Output file already exists"))

    if not coords and not input:
        gcore.fatal(_("One of <coordinates> and <input> must be given"))
    if coords and input:
        gcore.fatal(
            _("Options <coordinates> and <input> are mutually exclusive"))

    # parse field separator
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ',':
        ifs = ofs = ','
    else:
        try:
            ifs, ofs = fs.split(',')
        except ValueError:
            ifs = ofs = fs

    ifs = separator(ifs)
    ofs = separator(ofs)

    # set up projection params
    s = gcore.read_command("g.proj", flags='j')
    kv = parse_key_val(s)
    if "XY location" in kv['+proj'] and (ll_in or ll_out):
        gcore.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = gcore.read_command('g.proj', flags='jf')

    if proj_in:
        if '+' in proj_in:
            in_proj = proj_in
        else:
            gcore.fatal(_("Invalid PROJ.4 input specification"))

    if not in_proj:
        gcore.verbose("Assuming current location as input")
        in_proj = gcore.read_command('g.proj', flags='jf')

    in_proj = in_proj.strip()
    gcore.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
        out_proj = gcore.read_command('g.proj', flags='jf')

    if proj_out:
        if '+' in proj_out:
            out_proj = proj_out
        else:
            gcore.fatal(_("Invalid PROJ.4 output specification"))

    if not out_proj:
        gcore.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    gcore.verbose("Output parameters: '%s'" % out_proj)

    # set up input file
    if coords:
        x, y = coords.split(',')
        tmpfile = gcore.tempfile()
        fd = open(tmpfile, "w")
        fd.write("%s%s%s\n" % (x, ifs, y))
        fd.close()
        inf = open(tmpfile)
    else:
        if input == '-':
            infile = None
            inf = sys.stdin
        else:
            infile = input
            if not os.path.exists(infile):
                gcore.fatal(_("Unable to read input data"))
            inf = open(infile)
            gcore.debug("input file=[%s]" % infile)

    # set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, 'w')
        gcore.debug("output file=[%s]" % outfile)

    # set up output style
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    # do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'

    cmd = ['cs2cs'] + copyinp + outfmt + \
        in_proj.split() + ['+to'] + out_proj.split()

    p = gcore.Popen(cmd, stdin=gcore.PIPE, stdout=gcore.PIPE)

    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            try:
                xy, z = line.split(' ', 1)
                x, y = xy.split('\t')
            except ValueError:
                gcore.fatal(line)

            outf.write('%s%s%s%s%s\n' %
                       (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = line.split('\t')
            inX, inY = inXYZ.split(' ')[:2]
            y, z = rest.split(' ', 1)
            outf.write('%s%s%s%s%s%s%s%s%s\n' %
                       (inX.strip(), ofs, inY.strip(), ofs, x.strip(), ofs,
                        y.strip(), ofs, z.strip()))

    p.wait()

    if p.returncode != 0:
        gcore.warning(
            _("Projection transform probably failed, please investigate"))
Example #26
def run_r_sun(elevation, aspect, slope, day, time, civil_time, linke,
              linke_value, albedo, albedo_value, coeff_bh, coeff_dh, lat,
              long_, beam_rad, diff_rad, refl_rad, glob_rad, incidout, suffix,
              binary, tmpName, time_step, distance_step, solar_constant,
              flags):
    params = {}
    if linke:
        params.update({'linke': linke})
    if linke_value:
        params.update({'linke_value': linke_value})
    if albedo:
        params.update({'albedo': albedo})
    if albedo_value:
        params.update({'albedo_value': albedo_value})
    if coeff_bh:
        params.update({'coeff_bh': coeff_bh})
    if coeff_dh:
        params.update({'coeff_dh': coeff_dh})
    if lat:
        params.update({'lat': lat})
    if long_:
        params.update({'long': long_})
    if beam_rad:
        params.update({'beam_rad': beam_rad + suffix})
    if diff_rad:
        params.update({'diff_rad': diff_rad + suffix})
    if refl_rad:
        params.update({'refl_rad': refl_rad + suffix})
    if glob_rad:
        params.update({'glob_rad': glob_rad + suffix})
    if incidout:
        params.update({'incidout': incidout + suffix})
    if flags:
        params.update({'flags': flags})
    if civil_time is not None:
        params.update({'civil_time': civil_time})
    if distance_step is not None:
        params.update({'distance_step': distance_step})
    if solar_constant is not None:
        params.update({'solar_constant': solar_constant})

    if is_grass_7():
        grass.run_command('r.sun',
                          elevation=elevation,
                          aspect=aspect,
                          slope=slope,
                          day=day,
                          time=time,
                          overwrite=core.overwrite(),
                          quiet=True,
                          **params)
    else:
        grass.run_command('r.sun',
                          elevin=elevation,
                          aspin=aspect,
                          slopein=slope,
                          day=day,
                          time=time,
                          overwrite=core.overwrite(),
                          quiet=True,
                          **params)
    if binary:
        for output in (beam_rad, diff_rad, refl_rad, glob_rad):
            if not output:
                continue
            exp = '{out} = if({inp} > 0, 1, 0)'.format(out=output + suffix +
                                                       tmpName,
                                                       inp=output + suffix)
            grass.mapcalc(exp=exp, overwrite=core.overwrite())
            grass.run_command(
                'g.rename',
                raster=[output + suffix + tmpName, output + suffix],
                quiet=True,
                overwrite=True)
    if time_step:
        for output in (beam_rad, diff_rad, refl_rad, glob_rad):
            if not output:
                continue
            exp = '{out} = {inp} * {ts}'.format(inp=output + suffix,
                                                ts=time_step,
                                                out=output + suffix + tmpName)
            grass.mapcalc(exp=exp, overwrite=core.overwrite())
            grass.run_command(
                'g.rename',
                raster=[output + suffix + tmpName, output + suffix],
                overwrite=True,
                quiet=True)
Beispiel #27
0
def main():
    options, flags = grass.parser()
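    # Overall workflow: compute one shaded relief per azimuth in parallel
    # with r.relief, run a principal component analysis (i.pca) on the set
    # of shades, and composite the first three components into an RGB output.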

    elevation_input = options['input']
    pca_shade_output = options['output']
    altitude = float(options['altitude'])
    number_of_azimuths = int(options['nazimuths'])
    z_exaggeration = float(options['zscale'])
    scale = float(options['scale'])
    units = options['units']
    shades_basename = options['shades_basename']
    pca_basename = pca_basename_user = options['pca_shades_basename']
    nprocs = int(options['nprocs'])

    full_circle = 360
    # let's use floats here and leave the consequences to the user
    smallest_angle = float(full_circle) / number_of_azimuths
    azimuths = list(frange(0, full_circle, smallest_angle))

    if not shades_basename:
        shades_basename = create_tmp_map_name('shade')
        MREMOVE.append(shades_basename)

    if not pca_basename:
        pca_basename = pca_shade_output + '_pca'
    pca_maps = [
        pca_basename + '.' + str(i) for i in range(1, number_of_azimuths + 1)
    ]
    if not pca_basename_user:
        REMOVE.extend(pca_maps)

    # here we check all the possible output map names
    if not grass.overwrite():
        check_map_names(shades_basename,
                        grass.gisenv()['MAPSET'],
                        suffixes=azimuths)
        check_map_names(pca_basename,
                        grass.gisenv()['MAPSET'],
                        suffixes=range(1, number_of_azimuths))

    grass.info(_("Running r.relief in a loop..."))
    count = 0
    # Parallel processing
    proc_list = []
    proc_count = 0
    suffixes = []
    all_suffixes = []
    core.percent(0, number_of_azimuths, 1)
    for azimuth in azimuths:
        count += 1
        core.percent(count, number_of_azimuths, 10)

        suffix = '_' + str(azimuth)
        proc_list.append(
            Process(target=run_r_shaded_relief,
                    args=(elevation_input, shades_basename, altitude, azimuth,
                          z_exaggeration, scale, units, suffix)))

        proc_list[proc_count].start()
        proc_count += 1
        suffixes.append(suffix)
        all_suffixes.append(suffix)

        if proc_count == nprocs or proc_count == number_of_azimuths \
                or count == number_of_azimuths:
            proc_count = 0
            exitcodes = 0
            for proc in proc_list:
                proc.join()
                exitcodes += proc.exitcode

            if exitcodes != 0:
                core.fatal(_("Error during r.relief computation"))

            # Empty process list
            proc_list = []
            suffixes = []
    # FIXME: how does core.percent() really work?
    # core.percent(1, 1, 1)

    shade_maps = [shades_basename + suf for suf in all_suffixes]

    grass.info(_("Running r.pca..."))

    # not quiet=True to get percents
    grass.run_command('i.pca',
                      input=shade_maps,
                      output=pca_basename,
                      overwrite=core.overwrite())

    grass.info(
        _("Creating RGB composite from "
          "PC1 (red), PC2 (green), PC3 (blue) ..."))
    grass.run_command('r.composite',
                      red=pca_maps[0],
                      green=pca_maps[1],
                      blue=pca_maps[2],
                      output=pca_shade_output,
                      overwrite=core.overwrite(),
                      quiet=True)
    grass.raster_history(pca_shade_output)

    if pca_basename_user:
        set_color_table(pca_maps, map_=shade_maps[0])
Beispiel #28
0
def main():
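    # Import data from a Geopaparazzi sqlite database: depending on the flags,
    # bookmarks (-b), images (-i), notes (-n) and GPS tracks (-t) are imported
    # as vector maps; if the current location is not latlong, a temporary
    # latlong location is created for the import and the maps are reprojected
    # back with v.proj at the end.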
    indb = options['database']
    prefix = options['basename']
    env = grass.gisenv()
    # fix sqlite3 db field string multibyte character problem (Python 2 only)
    sys.setdefaultencoding('utf-8')
    # check if 3d or not
    if flags['z']:
        d3 = 'z'
    else:
        d3 = ''
    owrite = grass.overwrite()
    # check if the location is latlong
    if grass.locn_is_latlong():
        locn = True
    else:
        locn = False
    # connection to sqlite geopaparazzi database
    import sqlite3
    conn = sqlite3.connect(indb)
    curs = conn.cursor()
    # if it is not a latlong location create a latlong location on the fly
    if not locn:
        # create new location and move to it creating new gisrc file
        new_loc = basename(grass.tempfile(create=False))
        new_loc_name = 'geopaparazzi_%s' % new_loc
        grass.create_location(dbase=env['GISDBASE'], epsg='4326',
                              location=new_loc_name,
                              desc='Temporary location for v.in.geopaparazzi')
        grc = os.getenv('GISRC')
        shutil.copyfile(grc, grc + '.old')
        newrc = open(grc, 'w')
        newrc.write('GISDBASE: %s\n' % env['GISDBASE'])
        newrc.write('LOCATION_NAME: %s\n' % new_loc_name)
        newrc.write('MAPSET: PERMANENT\n')
        newrc.write('GRASS_GUI: text\n')
        newrc.close()
        grass.run_command('db.connect', flags="d", quiet=True)

    # load bookmarks
    if flags['b']:
        # check if the bookmarks table contains any elements
        if checkEle(curs, 'bookmarks') != 0:
            bookname = prefix + '_book'
            pois = importGeom(bookname, 'bookmarks', curs, owrite, '')
            sql = 'CREATE TABLE %s (cat int, text text)' % bookname
            grass.write_command('db.execute', input='-', stdin=sql)
            # select attributes
            sql = "select text from bookmarks order by _id"
            allattri = returnClear(curs, sql)
            # add values using insert statement
            idcat = 1
            for row in allattri:
                values = "%d,'%s'" % (idcat, str(row))
                sql = "insert into %s values(%s)" % (bookname, values)
                grass.write_command('db.execute', input='-', stdin=sql)
                idcat += 1
            # at the end connect table to vector
            grass.run_command('v.db.connect', map=bookname,
                              table=bookname, quiet=True)
        else:
            grass.warning(_("No bookmarks found, escape them"))
    # load images
    if flags['i']:
        # check if the images table contains any elements
        if checkEle(curs, 'images') != 0:
            imagename = prefix + '_image'
            pois = importGeom(imagename, 'images', curs, owrite, d3)
            sql = 'CREATE TABLE %s (cat int, azim int, ' % imagename
            sql += 'path text, ts text, text text)'
            grass.write_command('db.execute', input='-', stdin=sql)
            # select attributes
            sql = "select azim, path, ts, text from images order by _id"
            allattri = returnAll(curs, sql)
            # add values using insert statement
            idcat = 1
            for row in allattri:
                values = "%d,'%d','%s','%s','%s'" % (idcat, row[0],
                                                     str(row[1]), str(row[2]),
                                                     str(row[3]))
                sql = "insert into %s values(%s)" % (imagename, values)
                grass.write_command('db.execute', input='-', stdin=sql)
                idcat += 1
            # at the end connect table to vector
            grass.run_command('v.db.connect', map=imagename, table=imagename,
                              quiet=True)
        else:
            grass.warning(_("No images found, escape them"))
    # if tracks or nodes should be imported create a connection with sqlite3
    # load notes
    if flags['n']:
        # check if the notes table contains any elements
        if checkEle(curs, 'notes') != 0:
            # select each category
            categories = returnClear(curs, "select cat from notes group by cat")
            # for each category
            for cat in categories:
                # select lat, lon to create the point layer
                catname = prefix + '_notes_' + cat
                pois = importGeom(catname, 'notes', curs, owrite, d3, cat)
                # select forms to know how many there are
                forms = returnClear(curs, "select _id from notes where cat = '%s' "
                                    "and form is not null order by _id" % cat)
                # if the number of forms is not 0 and differs from the number
                # of points, remove the vector because some forms are missing
                if len(forms) != 0 and len(forms) != len(pois):
                    grass.run_command('g.remove', flags='f', type='vector',
                                      name=catname, quiet=True)
                    grass.warning(_("Vector %s not imported because the number"
                                    " of points and forms differs") % catname)
                # if there are no forms
                elif len(forms) == 0:
                    # create table without form
                    sql = 'CREATE TABLE %s (cat int, ts text, ' % catname
                    sql += 'text text, geopap_cat text)'
                    grass.write_command('db.execute', input='-', stdin=sql)
                    # select attributes
                    sql = "select ts, text, cat from notes where "\
                        "cat='%s' order by _id" % cat
                    allattri = returnAll(curs, sql)
                    # add values using insert statement
                    idcat = 1
                    for row in allattri:
                        values = "%d,'%s','%s','%s'" % (idcat, str(row[0]),
                                                        str(row[1]),
                                                        str(row[2]))
                        sql = "insert into %s values(%s)" % (catname, values)
                        grass.write_command('db.execute', input='-', stdin=sql)
                        idcat += 1
                    # at the end connect table to vector
                    grass.run_command('v.db.connect', map=catname,
                                      table=catname, quiet=True)
                # create table with form
                else:
                    # select all the attribute
                    sql = "select ts, text, cat, form from notes where "\
                          "cat='%s' order by _id" % cat
                    allattri = returnAll(curs, sql)
                    # return string of the form's categories to create the table
                    keys = returnFormKeys(allattri)
                    sql = 'CREATE TABLE %s (cat int, ts text, ' % catname
                    sql += 'text text, geopap_cat text %s)' % keys
                    grass.write_command('db.execute', input='-', stdin=sql)
                    # counter for the categories
                    idcat = 1
                    # for each feature insert value
                    for row in allattri:
                        values = "%d,'%s','%s','%s'," % (idcat, str(row[0]),
                                                         str(row[1]),
                                                         str(row[2]))
                        values += returnFormValues(row[3])
                        sql = "insert into %s values(%s)" % (catname, values)
                        grass.write_command('db.execute', input='-', stdin=sql)
                        idcat += 1
                    # at the end connect table with vector
                    grass.run_command('v.db.connect', map=catname,
                                      table=catname, quiet=True)
        else:
            grass.warning(_("No notes found, escape them"))
    # load tracks
    if flags['t']:
        # check if the gpslogs table contains any elements
        if checkEle(curs, 'gpslogs') != 0:
            tracksname = prefix + '_tracks'
            # define string for insert data at the end
            tracks = ''
            # return ids of tracks
            ids = returnClear(curs, "select _id from gpslogs")
            # for each track
            for i in ids:
                # select all the points coordinates
                tsel = "select lon, lat"
                if flags['z']:
                    tsel += ", altim"
                tsel += " from gpslog_data where logid=%s order by _id" % i
                trackpoints = returnAll(curs, tsel)
                wpoi = '\n'.join(['|'.join([str(col) for col in row]) for row in trackpoints])
                tracks += "%s\n" % wpoi
                if flags['z']:
                    tracks += 'NaN|NaN|NaN\n'
                else:
                    tracks += 'NaN|NaN\n'
            # import lines
            try:
                grass.write_command('v.in.lines', flags=d3, input='-',
                                    out=tracksname, stdin=tracks,
                                    overwrite=owrite, quiet=True)
            except CalledModuleError:
                grass.fatal(_("Error importing %s" % tracksname))
            # create table for line
            sql = 'CREATE TABLE %s (cat int, startts text, ' % tracksname
            sql += 'endts text, text text, color text, width int)'
            grass.write_command('db.execute', input='-', stdin=sql)
            sql = "select logid, startts, endts, text, color, width from" \
                  " gpslogs, gpslogsproperties where gpslogs._id=" \
                  "gpslogsproperties.logid"
            # return attributes
            allattri = returnAll(curs, sql)
            # for each line insert attribute
            for row in allattri:
                values = "%d,'%s','%s','%s','%s',%d" % (row[0], str(row[1]),
                                                        str(row[2]),
                                                        str(row[3]),
                                                        str(row[4]), row[5])
                sql = "insert into %s values(%s)" % (tracksname, values)
                grass.write_command('db.execute', input='-', stdin=sql)
            # at the end connect map with table
            grass.run_command('v.db.connect', map=tracksname,
                              table=tracksname, quiet=True)
        else:
            grass.warning(_("No tracks found, escape them"))
    # if the location is not latlong, reproject the imported maps
    if not locn:
        # restore the original location from the gisrc backup
        shutil.copyfile(grc + '.old', grc)
        # reproject bookmarks
        if flags['b'] and checkEle(curs, 'bookmarks') != 0:
            grass.run_command('v.proj', quiet=True, input=bookname,
                              location='geopaparazzi_%s' % new_loc,
                              mapset='PERMANENT')
        # reproject images
        if flags['i'] and checkEle(curs, 'images') != 0:
            grass.run_command('v.proj', quiet=True, input=imagename,
                              location='geopaparazzi_%s' % new_loc,
                              mapset='PERMANENT')
        # reproject notes
        if flags['n'] and checkEle(curs, 'notes') != 0:
            for cat in categories:
                catname = prefix + '_notes_' + cat
                grass.run_command('v.proj', quiet=True, input=catname,
                                  location='geopaparazzi_%s' % new_loc,
                                  mapset='PERMANENT')
        # reproject track
        if flags['t'] and checkEle(curs, 'gpslogs') != 0:
            grass.run_command('v.proj', quiet=True, input=tracksname,
                              location='geopaparazzi_%s' % new_loc,
                              mapset='PERMANENT')
Beispiel #29
0
def main():
    elev = options['input']
    output = options['output']
    n_dir = int(options['ndir'])
    global TMP_NAME, CLEANUP
    if options['basename']:
        TMP_NAME = options['basename']
        CLEANUP = False
    colorized_output = options['colorized_output']
    colorize_color = options['color_table']
    if colorized_output:
        color_raster_tmp = TMP_NAME + "_color_raster"
    else:
        color_raster_tmp = None
    color_raster_type = options['color_source']
    color_input = options['color_input']
    if color_raster_type == 'color_input' and not color_input:
        gcore.fatal(_("Provide raster name in color_input option"))
    if color_raster_type != 'color_input' and color_input:
        gcore.fatal(
            _("The option color_input is not needed"
              " when not using it as source for color"))
    # this would be needed only when no value would be allowed
    if not color_raster_type and color_input:
        color_raster_type = 'color_input'  # enable for convenience
    if color_raster_type == 'aspect' \
            and colorize_color \
            and colorize_color not in ['default', 'aspectcolr']:
        gcore.warning(
            _("Using possibly inappropriate color table <{}>"
              " for aspect".format(colorize_color)))

    horizon_step = 360. / n_dir
    msgr = get_msgr()

    # checks if there are already some maps
    old_maps = _get_horizon_maps()
    if old_maps:
        if not gcore.overwrite():
            CLEANUP = False
            msgr.fatal(
                _("You have to set the overwrite flag first or remove"
                  " the following maps:\n"
                  "{names}").format(names=','.join(old_maps)))
        else:
            msgr.warning(
                _("The following maps will be overwritten: {names}").format(
                    names=','.join(old_maps)))
    if not gcore.overwrite() and color_raster_tmp:
        check_map_name(color_raster_tmp)
    try:
        params = {}
        if options['maxdistance']:
            params['maxdistance'] = options['maxdistance']
        gcore.run_command('r.horizon',
                          elevation=elev,
                          step=horizon_step,
                          output=TMP_NAME,
                          flags='d',
                          **params)

        new_maps = _get_horizon_maps()
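        # The expressions below implement (for n horizon directions):
        #   openness       = 1 - sum(sin(h_i)) / n
        #   skyview factor = 1 - sum(sin(max(h_i, 0))) / n
        # where h_i is the horizon elevation angle in direction i.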
        if flags['o']:
            msgr.message(_("Computing openness ..."))
            expr = '{out} = 1 - (sin({first}) '.format(first=new_maps[0],
                                                       out=output)
            for horizon in new_maps[1:]:
                expr += '+ sin({name}) '.format(name=horizon)
            expr += ") / {n}.".format(n=len(new_maps))
        else:
            msgr.message(_("Computing skyview factor ..."))
            expr = '{out} = 1 - (sin( if({first} < 0, 0, {first}) ) '.format(
                first=new_maps[0], out=output)
            for horizon in new_maps[1:]:
                expr += '+ sin( if({name} < 0, 0, {name}) ) '.format(
                    name=horizon)
            expr += ") / {n}.".format(n=len(new_maps))

        grast.mapcalc(exp=expr)
        gcore.run_command('r.colors', map=output, color='grey')
    except CalledModuleError:
        msgr.fatal(
            _("r.horizon failed to compute horizon elevation "
              "angle maps. Please report this problem to developers."))
        return 1
    if colorized_output:
        if color_raster_type == 'slope':
            gcore.run_command('r.slope.aspect',
                              elevation=elev,
                              slope=color_raster_tmp)
        elif color_raster_type == 'aspect':
            gcore.run_command('r.slope.aspect',
                              elevation=elev,
                              aspect=color_raster_tmp)
        elif color_raster_type == 'dxy':
            gcore.run_command('r.slope.aspect',
                              elevation=elev,
                              dxy=color_raster_tmp)
        elif color_raster_type == 'color_input':
            color_raster_tmp = color_input
        else:
            color_raster_tmp = elev
        # don't modify user's color table for inputs
        if colorize_color \
                and color_raster_type not in ['input', 'color_input']:
            rcolors_flags = ''
            if flags['n']:
                rcolors_flags += 'n'
            gcore.run_command('r.colors',
                              map=color_raster_tmp,
                              color=colorize_color,
                              flags=rcolors_flags)
        gcore.run_command('r.shade',
                          shade=output,
                          color=color_raster_tmp,
                          output=colorized_output)
        grast.raster_history(colorized_output)
    grast.raster_history(output)
    return 0
Beispiel #30
0
def main():
    options, flags = gcore.parser()
    aspect = options['aspect']
    speed = options['speed']
    probability = options['probability']
    if options['particle_base']:
        particle_base = options['particle_base'] + '_'
    else:
        particle_base = None
    if options['particles']:
        particles = options['particles']
        min_size = float(options['min_size'])
        max_size = float(options['max_size'])
        comet_length = int(options['comet_length'])
    else:
        particles = min_size = max_size = comet_length = None
    try:
        total_time = int(options['total_time'])
        step = int(options['step'])
        age = int(options['age'])
        count = int(options['count'])
    except ValueError:
        gcore.fatal(_("Parameter should be integer"))

    gcore.use_temp_region()
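    # Particle advection scheme: per-step x/y displacement rasters are derived
    # from the aspect (direction) and speed maps, points are moved with
    # v.transform, aged and removed once they leave the region or exceed the
    # maximum age, and new points are generated to keep the count constant.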

    # create aspect in x and y direction
    aspect_x = 'aspect_x_' + str(os.getpid())
    aspect_y = 'aspect_y_' + str(os.getpid())
    xshift_tmp = 'xshift_tmp_' + str(os.getpid())
    yshift_tmp = 'yshift_tmp_' + str(os.getpid())
    TMP_RAST.append(aspect_x)
    TMP_RAST.append(aspect_y)
    grast.mapcalc(exp="{aspect_x} = cos({aspect})".format(aspect_x=aspect_x, aspect=aspect))
    grast.mapcalc(exp="{aspect_y} = sin({aspect})".format(aspect_y=aspect_y, aspect=aspect))
    grast.mapcalc(exp="{xshift} = {aspect_x}*{speed}*{t}".format(xshift=xshift_tmp, t=step, speed=speed,
                                                                 aspect_x=aspect_x), overwrite=True)
    grast.mapcalc(exp="{yshift} = {aspect_y}*{speed}*{t}".format(yshift=yshift_tmp, t=step, speed=speed,
                                                                 aspect_y=aspect_y), overwrite=True)

    # initialize
    vector_tmp1 = 'vector_tmp1_' + str(os.getpid())
    vector_tmp2 = 'vector_tmp2_' + str(os.getpid())
    vector_tmp3 = 'vector_tmp3_' + str(os.getpid())
    vector_region = 'vector_region_' + str(os.getpid())
    TMP_VECT.extend([vector_tmp1, vector_tmp2, vector_tmp3, vector_region])
    random_tmp = 'random_tmp_' + str(os.getpid())
    TMP_RAST.extend([xshift_tmp, yshift_tmp, random_tmp])
    gcore.run_command('v.in.region', output=vector_region, type='area')

    loop = 0
    vector_1 = particle_base + "{0:03d}".format(loop)
    generate_points(name=vector_1, probability_map=probability, count=count)

    grast.mapcalc(exp="{random} = int(rand(1, {maxt}))".format(random=random_tmp, maxt=age + 1))
    gcore.run_command('v.what.rast', map=vector_1, raster=random_tmp, column='t')
    write_vect_history('v.particles', options, flags, vector_1)
    vector_names = [vector_1, ]
    for time in range(0, total_time + step, step):
        vector_1 = particle_base + "{0:03d}".format(loop)
        vector_2 = particle_base + "{0:03d}".format(loop + 1)
        vector_names.append(vector_2)

        gcore.run_command('v.what.rast', map=vector_1, raster=xshift_tmp, column='xshift')
        gcore.run_command('v.what.rast', map=vector_1, raster=yshift_tmp, column='yshift')
        gcore.run_command('v.transform', layer=1, input=vector_1, output=vector_2,
                          columns='xshift:xshift,yshift:yshift', quiet=True)
        # increase age
        gcore.info("Increasing age...")
        sql = 'UPDATE {table} SET t=t+1;'.format(table=vector_2)
        gcore.run_command('db.execute', sql=sql)

        # remove old points
        gcore.info("Removing old points...")
        gcore.run_command('v.select', overwrite=True, ainput=vector_2, atype='point',
                          binput=vector_region, btype='area', operator='within', output=vector_tmp1)
        gcore.run_command('v.extract', input=vector_tmp1, layer=1, type='point',
                          where="t <= " + str(age) + " AND xshift IS NOT NULL", output=vector_tmp2, overwrite=True)

        # generate new points
        gcore.info("Generating new points...")
        count_to_generate = count - gvect.vector_info(vector_tmp2)['points']
        if count_to_generate > 0:
            generate_points(name=vector_tmp3, probability_map=probability,
                            count=count_to_generate, overwrite=True)

            gcore.info("Patchig new and old points...")
            gcore.run_command('v.patch', flags='e', input=[vector_tmp2, vector_tmp3],
                              output=vector_2, overwrite=True)
            sql = 'UPDATE {table} SET t={t} WHERE t IS NULL;'.format(table=vector_2, t=0)
            gcore.run_command('db.execute', sql=sql)
            
        write_vect_history('v.particles', options, flags, vector_2)

        loop += 1
    # Make sure the temporal database exists
    tgis.init()

    tgis.open_new_space_time_dataset(particle_base[:-1], type='stvds',
                                     temporaltype='relative',
                                     title="title", descr='desc',
                                     semantic='mean', dbif=None,
                                     overwrite=gcore.overwrite())
    # TODO: we must start from 1 because there is a bug in register_maps_in_space_time_dataset
    tgis.register_maps_in_space_time_dataset(
        type='vect', name=particle_base[:-1], maps=','.join(vector_names),
        start=str(1), end=None, unit='seconds', increment=step,
        interval=False, dbif=None)
        
    # create one vector map with multiple layers
    fd, path = tempfile.mkstemp(text=True)
    tmpfile = open(path, 'w')
    k = 0
    for vector in vector_names:
        k += 1
        layers = [x for x in range(k - comet_length + 1, k + 1) if x > 0]
        categories = list(range(len(layers), 0, -1))
        text = ''
        for layer, cat in zip(layers, categories):
            text += '{l} {c}\n'.format(l=layer, c=cat)
        coords = gcore.read_command('v.to.db', flags='p', quiet=True, map=vector,
                                    type='point', option='coor', separator=" ").strip()
        for coord in coords.split('\n'):
            coord = coord.split()
            tmpfile.write('P 1 {n_cat}\n{x} {y}\n'.format(n_cat=len(categories), x=coord[1], y=coord[2]))
            tmpfile.write(text)
    tmpfile.close()

    gcore.run_command('v.in.ascii', flags='n', overwrite=True, input=path, output=particles,
                      format='standard', separator=" ")
    os.close(fd)
    os.remove(path)
    k = 0
    sql = []
    sizes = get_sizes(max_size, min_size, comet_length)
    temporal_maps = []
    for vector in vector_names:
        k += 1
        table = 't' + str(k)
        gcore.run_command('v.db.addtable', map=particles, table=table, layer=k,
                          column="width double precision")
        temporal_maps.append(particles + ':' + str(k))
        for i in range(comet_length):
            sql.append("UPDATE {table} SET width={w:.1f} WHERE cat={c}".format(table=table,
                                                                               w=sizes[i][1], c=sizes[i][0]))
    gcore.write_command('db.execute', input='-', stdin=';\n'.join(sql))

    tgis.open_new_space_time_dataset(particles, type='stvds',
                                     temporaltype='relative',
                                     title="title", descr='desc',
                                     semantic='mean', dbif=None,
                                     overwrite=True)
    # TODO: we must start from 1 because there is a bug in register_maps_in_space_time_dataset
    tgis.register_maps_in_space_time_dataset(
        type='vect', name=particles, maps=','.join(temporal_maps),
        start=str(1), end=None, unit='seconds', increment=step,
        interval=False, dbif=None)

    write_vect_history('v.particles', options, flags, particles)
Beispiel #31
0
def main(opt, flg):
    # import functions which depend on sklearn only after parser run
    from ml_functions import (balance, explorer_clsfiers, run_classifier,
                              optimize_training, explore_SVC, plot_grid)
    from features import importances, tocsv

    msgr = get_msgr()
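    # Flag-driven workflow: -e extract training data, -n save numpy arrays,
    # -f feature importances, -o optimize the training set, -b balance it,
    # -d explore the SVC parameter domain, -t test the classifiers,
    # -c classify the data and export results, -r rasterize the result layer.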
    indexes = None
    vect = opt['vector']
    vtraining = opt['vtraining'] if opt['vtraining'] else None
    scaler, decmp = None, None
    vlayer = opt['vlayer'] if opt['vlayer'] else vect + '_stats'
    tlayer = opt['tlayer'] if opt['tlayer'] else vect + '_training'
    rlayer = opt['rlayer'] if opt['rlayer'] else vect + '_results'

    labels = extract_classes(vtraining, 1)
    pprint(labels)

    if opt['scalar']:
        scapar = opt['scalar'].split(',')
        from sklearn.preprocessing import StandardScaler
        scaler = StandardScaler(with_mean='with_mean' in scapar,
                                with_std='with_std' in scapar)

    if opt['decomposition']:
        dec, params = (opt['decomposition'].split('|')
                       if '|' in opt['decomposition']
                       else (opt['decomposition'], ''))
        kwargs = ({k: v for k, v in (p.split('=') for p in params.split(','))}
                  if params else {})
        load_decompositions()
        decmp = DECMP[dec](**kwargs)

    # if training extract training
    if vtraining and flg['e']:
        msgr.message("Extract training from: <%s> to <%s>." % (vtraining, vect))
        extract_training(vect, vtraining, tlayer)
        flg['n'] = True

    if flg['n']:
        msgr.message("Save arrays to npy files.")
        save2npy(vect, vlayer, tlayer,
                 fcats=opt['npy_cats'], fcols=opt['npy_cols'],
                 fdata=opt['npy_data'], findx=opt['npy_index'],
                 fclss=opt['npy_tclasses'], ftdata=opt['npy_tdata'])

    # define the classifiers to use/test
    if opt['pyclassifiers'] and opt['pyvar']:
        # import classifiers to use
        mycls = imp.load_source("mycls", opt['pyclassifiers'])
        classifiers = getattr(mycls, opt['pyvar'])
    else:
        from ml_classifiers import CLASSIFIERS
        classifiers = CLASSIFIERS

    # Append the SVC classifier
    if opt['svc_c'] and opt['svc_gamma']:
        from sklearn.svm import SVC
        svc = {'name': 'SVC', 'classifier': SVC,
               'kwargs': {'C': float(opt['svc_c']),
                          'gamma': float(opt['svc_gamma']),
                          'kernel': opt['svc_kernel']}}
        classifiers.append(svc)

    # extract classifiers from pyindx
    if opt['pyindx']:
        indexes = [i for i in get_indexes(opt['pyindx'])]
        classifiers = [classifiers[i] for i in indexes]

    num = int(opt['n_training']) if opt['n_training'] else None

    # load from npy files
    Xt = np.load(opt['npy_tdata'])
    Yt = np.load(opt['npy_tclasses'])
    cols = np.load(opt['npy_cols'])

    # Define rules to substitute NaN, Inf, posInf, negInf values
    rules = {}
    for key in ('nan', 'inf', 'neginf', 'posinf'):
        if opt[key]:
            rules[key] = get_rules(opt[key])
    pprint(rules)

    # Substitute (skip cat column)
    Xt, rules_vals = substitute(Xt, rules, cols[1:])
    Xtoriginal = Xt

    # scale the data
    if scaler:
        msgr.message("Scaling the training data set.")
        scaler.fit(Xt, Yt)
        Xt = scaler.transform(Xt)

    # decompose data
    if decmp:
        msgr.message("Decomposing the training data set.")
        decmp.fit(Xt)
        Xt = decmp.transform(Xt)

    # Feature importances with forests of trees
    if flg['f']:
        np.save('training_transformed.npy', Xt)
        importances(Xt, Yt, cols[1:],
                    csv=opt['imp_csv'], img=opt['imp_fig'],
                    # default parameters to save the matplotlib figure
                    **dict(dpi=300, transparent=False, bbox_inches='tight'))

    # optimize the training set
    if flg['o']:
        ind_optimize = (int(opt['pyindx_optimize']) if opt['pyindx_optimize']
                        else 0)
        cls = classifiers[ind_optimize]
        msgr.message("Find the optimum training set.")
        best, Xbt, Ybt = optimize_training(cls, Xt, Yt,
                                           labels, #{v: k for k, v in labels.items()},
                                           scaler, decmp,
                                           num=num, maxiterations=1000)
        msg = "    - save the optimum training data set to: %s."
        msgr.message(msg % opt['npy_btdata'])
        np.save(opt['npy_btdata'], Xbt)
        msg = "    - save the optimum training classes set to: %s."
        msgr.message(msg % opt['npy_btclasses'])
        np.save(opt['npy_btclasses'], Ybt)

    # balance the data
    if flg['b']:
        msg = "Balancing the training data set, each class have <%d> samples."
        msgr.message(msg % num)
        Xbt, Ybt = balance(Xt, Yt, num)
    else:
        if not flg['o']:
            Xbt = (np.load(opt['npy_btdata'])
                   if os.path.isfile(opt['npy_btdata']) else Xt)
            Ybt = (np.load(opt['npy_btclasses'])
                   if os.path.isfile(opt['npy_btclasses']) else Yt)

    # scale the data
    if scaler:
        msgr.message("Scaling the training data set.")
        scaler.fit(Xbt, Ybt)
        Xt = scaler.transform(Xt)
        Xbt = scaler.transform(Xbt)

    if flg['d']:
        C_range = [float(c) for c in opt['svc_c_range'].split(',') if c]
        gamma_range = [float(g) for g in opt['svc_gamma_range'].split(',') if g]
        kernel_range = [str(s) for s in opt['svc_kernel_range'].split(',') if s]
        poly_range = [int(i) for i in opt['svc_poly_range'].split(',') if i]
        allkwargs = dict(C=C_range, gamma=gamma_range,
                         kernel=kernel_range, degree=poly_range)
        kwargs = {}
        for k in allkwargs:
            if allkwargs[k]:
                kwargs[k] = allkwargs[k]
        msgr.message("Exploring the SVC domain.")
        grid = explore_SVC(Xbt, Ybt, n_folds=5, n_jobs=int(opt['svc_n_jobs']),
                           **kwargs)
        import pickle
        krnlstr = '_'.join(s for s in opt['svc_kernel_range'].split(',') if s)
        pkl = open('grid%s.pkl' % krnlstr, 'wb')
        pickle.dump(grid, pkl)
        pkl.close()
#        pkl = open('grid.pkl', 'r')
#        grid = pickle.load(pkl)
#        pkl.close()
        plot_grid(grid, save=opt['svc_img'])

    # test the accuracy of different classifiers
    if flg['t']:
        # test different classifiers
        msgr.message("Exploring different classifiers.")
        msgr.message("cls_id   cls_name          mean     max     min     std")

        res = explorer_clsfiers(classifiers, Xt, Yt, labels=labels,
                                indexes=indexes, n_folds=5,
                                bv=flg['v'], extra=flg['x'])
        # TODO: sort(order=...) works only in the terminal, why?
        #res.sort(order='mean')
        with open(opt['csv_test_cls'], 'w') as csv:
            csv.write(tocsv(res))

    if flg['c']:
        # classify
        data = np.load(opt['npy_data'])
        indx = np.load(opt['npy_index'])

        # Substitute using column values
        data, dummy = substitute(data, rules, cols[1:])
        Xt = data[indx]

        if scaler:
            msgr.message("Scaling the training data set.")
            scaler.fit(Xt, Yt)
            Xt = scaler.transform(Xt)
            msgr.message("Scaling the whole data set.")
            data = scaler.transform(data)
        if decmp:
            msgr.message("Decomposing the training data set.")
            decmp.fit(Xt)
            Xt = decmp.transform(Xt)
            msgr.message("Decompose the whole data set.")
            data = decmp.transform(data)
        cats = np.load(opt['npy_cats'])

        np.save('data_filled_scaled.npy', data)
        tcols = []
        for cls in classifiers:
            report = (open(opt['report_class'], "w")
                      if opt['report_class'] else sys.stdout)
            run_classifier(cls, Xt, Yt, Xt, Yt, labels, data,
                           report=report)
            tcols.append((cls['name'], 'INTEGER'))

        import pickle
        with open('classification_results.pkl', 'wb') as res:
            pickle.dump(classifiers, res)
        #classifiers = pickle.load(res)
        msgr.message("Export the results to layer: <%s>" % str(rlayer))
        export_results(vect, classifiers, cats, rlayer, vtraining, tcols,
                       overwrite(), pkl='res.pkl', append=flg['a'])
#        res.close()

    if flg['r']:
        rules = ('\n'.join(['%d %s' % (k, v)
                            for k, v in get_colors(vtraining).items()])
                 if vtraining else None)

        msgr.message("Export the layer with results to raster")
        with Vector(vect, mode='r') as vct:
            tab = vct.dblinks.by_name(rlayer).table()
            rasters = [c for c in tab.columns]
            rasters.remove(tab.key)

        v2rst = Module('v.to.rast')
        rclrs = Module('r.colors')
        for rst in rasters:
            v2rst(input=vect, layer=rlayer, type='area',
                  use='attr', attrcolumn=rst.encode(),
                  output=(opt['rst_names'] % rst).encode(),
                  memory=1000, overwrite=overwrite())
            if rules:
                rclrs(map=rst.encode(), rules='-', stdin_=rules)
Beispiel #32
0
def main(opts, flgs):
    TMPVECT = []
    DEBUG = True if flgs["d"] else False
    atexit.register(cleanup, vect=TMPVECT, debug=DEBUG)
    # check input maps
    rhydro = ["kind_label", "discharge", "id_point", "id_plant"]
    rother = ["kind_label", "discharge", "id_point", "id_plant"]
    ovwr = overwrite()

    try:
        hydro = check_required_columns(
            opts["hydro"], int(opts["hydro_layer"]), rhydro, "hydro"
        )
        if opts["other"]:
            other = check_required_columns(
                opts["other"], opts["other_layer"], rother, "other"
            )
        else:
            other = None
        # minflow = check_float_or_raster(opts['minflow'])
    except ParameterError as exc:
        exception2error(exc)

    # start working
    hydro.open("r")
    el, mset = (
        opts["elevation"].split("@")
        if "@" in opts["elevation"]
        else (opts["elevation"], "")
    )
    elev = RasterRow(name=el, mapset=mset)
    elev.open("r")
    # import ipdb; ipdb.set_trace()
    plants, skipped = read_plants(
        hydro,
        elev=elev,
        restitution=opts["hydro_kind_turbine"],
        intake=opts["hydro_kind_intake"],
    )
    hydro.close()
    rvname, rvmset = (
        opts["river"].split("@") if "@" in opts["river"] else (opts["river"], "")
    )

    vplants = opts["output_plants"] if opts["output_plants"] else "tmpplants"
    # FIXME: I tried with tmpplants in my mapset and it doesn't work
    if opts["output_plants"] == "":
        TMPVECT.append(vplants)
    with VectorTopo(rvname, rvmset, mode="r") as river:
        write_plants(plants, vplants, river, elev, overwrite=ovwr)

    if skipped:
        for skip in skipped:
            print("Plant: %r, Point: %r, kind: %r" % skip)
    elev.close()

    # compute a buffer around the plants
    buff = vplants + "buff"
    v.buffer(input=vplants, type="line", output=buff, distance=0.1, overwrite=ovwr)
    TMPVECT.append(buff)
    # return all the river segments that are not already with plants
    v.overlay(
        flags="t",
        ainput=opts["river"],
        atype="line",
        binput=buff,
        operator="not",
        output=opts["output_streams"],
        overwrite=ovwr,
    )
Beispiel #33
0
#%end
#%flag
#% key: r
#% description: Remove all operational maps
#%end

import pdb

import numpy as np

import grass.script as grass
from grass.pygrass.messages import get_msgr
from grass.pygrass.raster import RasterRow
from grass.script.core import overwrite, parser, read_command, run_command

ow = overwrite()


def yield_pix_process(opts, flgs, yield_, yield_surface):

    YPIX = ''

    expr_surf = 'analysis_surface=' + opts['energy_map'] + '>0'
    run_command('r.mapcalc', overwrite=ow, expression=expr_surf)
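    # Per-pixel yield: total yield divided by the yield surface, scaled by the
    # pixel area in hectares (ewres()*nsres()/10000).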

    run_command("r.mapcalc",
                overwrite=ow,
                expression='yield_pix1 = (' + yield_ + '/' + yield_surface +
                ')*((ewres()*nsres())/10000)')

    run_command("r.null", map="yield_pix1", null=0)
Beispiel #34
0
def main(opts, flgs):
    ow = overwrite()

    output = opts['output_basename']

    forest = opts['forest']
    boundaries = opts['boundaries']
    yield_ = opts['forest_column_yield']
    management = opts['forest_column_management']
    treatment = opts['forest_column_treatment']
    yield_surface = opts['forest_column_yield_surface']
    roughness = opts['forest_column_roughness']
    forest_roads = opts['forest_roads']

    rivers = opts['rivers']
    lakes = opts['lakes']

    vector_forest = opts['forest']

    tech_bioenergyHF = output + '_tech_bioenergyHF'
    tech_bioenergyC = output + '_tech_bioenergyC'
    tech_bioenergy = output + '_tech_bioenergy'

    ######## start import and convert ########

    run_command("g.region", vect=boundaries)
    run_command("v.to.rast",
                input=forest,
                output="yield",
                use="attr",
                attrcolumn=yield_,
                overwrite=True)
    run_command("v.to.rast",
                input=forest,
                output="yield_surface",
                use="attr",
                attrcolumn=yield_surface,
                overwrite=True)
    run_command("v.to.rast",
                input=forest,
                output="treatment",
                use="attr",
                attrcolumn=treatment,
                overwrite=True)
    run_command("v.to.rast",
                input=forest,
                output="management",
                use="attr",
                attrcolumn=management,
                overwrite=True)

    run_command("v.to.rast",
                input=forest_roads,
                output="forest_roads",
                use="val",
                overwrite=True)

    run_command("r.null", map='yield', null=0)
    run_command("r.null", map='yield_surface', null=0)
    run_command("r.null", map='treatment', null=0)
    run_command("r.null", map='management', null=0)

    ######## end import and convert ########

    ######## temp patch to link map and fields ######

    management = "management"
    treatment = "treatment"
    yield_surface = "yield_surface"
    yield_ = "yield"
    forest_roads = "forest_roads"

    ######## end temp patch to link map and fields ######

    if roughness == '':
        run_command("r.mapcalc", overwrite=ow, expression='roughness=0')
        roughness = 'roughness'
    else:
        run_command("v.to.rast",
                    input=forest,
                    output="roughness",
                    use="attr",
                    attrcolumn=roughness,
                    overwrite=True)
        run_command("r.null", map='roughness', null=0)

    CCEXTR = 'cable_crane_extraction = if(' + yield_ + '>0 && slope>' + opts[
        'slp_min_cc'] + ' && slope<=' + opts[
            'slp_max_cc'] + ' && extr_dist<' + opts['dist_max_cc'] + ', 1)'

    FWEXTR = 'forwarder_extraction = if(' + yield_ + '>0 && slope<=' + opts[
        'slp_max_fw'] + ' && ' + management + '==1 && (' + roughness + '==0 || ' + roughness + '==1 || ' + roughness + '==99999) && extr_dist<' + opts[
            'dist_max_fw'] + ', 1)'

    OEXTR = 'other_extraction = if(' + yield_ + '>0 && slope<=' + opts[
        'slp_max_cop'] + ' && ' + management + '==2 && (' + roughness + '==0 || ' + roughness + '==1 || ' + roughness + '==99999) && extr_dist<' + opts[
            'dist_max_cop'] + ', 1)'

    EHF = tech_bioenergyHF + ' = technical_surface*(if(' + management + '==1 && ' + treatment + '==1 || ' + management + '==1 && ' + treatment + '==99999, yield_pix*' + opts[
        'energy_tops_hf'] + ', if(' + management + '==1 && ' + treatment + '==2, yield_pix *' + opts[
            'energy_tops_hf'] + ' + yield_pix * ' + opts[
                'energy_cormometric_vol_hf'] + ')))'

    ECC = tech_bioenergyC + ' = technical_surface*(if(' + management + ' == 2, yield_pix*' + opts[
        'energy_tops_cop'] + '))'

    ET = tech_bioenergy + ' = (' + tech_bioenergyC + ' + ' + tech_bioenergyHF + ')'

    run_command("r.param.scale",
                overwrite=ow,
                input=opts['dtm'],
                output="morphometric_features",
                size=3,
                method="feature")
    run_command("r.slope.aspect",
                overwrite=ow,
                elevation=opts['dtm'],
                slope="slope_deg")
    run_command("r.mapcalc",
                overwrite=ow,
                expression='pix_cross = ((ewres()+nsres())/2)/ cos(slope_deg)')
    run_command("r.mapcalc",
                overwrite=ow,
                expression='yield_pix1 = (' + yield_ + '/' + yield_surface +
                ')*((ewres()*nsres())/10000)')
    run_command("r.null", map="yield_pix1", null=0)
    run_command("r.null", map="morphometric_features", null=0)

    exprmap = 'frict_surf_extr = pix_cross + if(yield_pix1<=0, 99999) + if(morphometric_features==6, 99999)'

    if rivers != '':
        run_command("v.to.rast",
                    input=rivers,
                    output="rivers",
                    use="val",
                    overwrite=True)
        run_command("r.null", map="rivers", null=0)
        rivers = "rivers"
        exprmap += '+ if(' + rivers + '>=1, 99999)'

    if lakes != '':
        run_command("v.to.rast",
                    input=lakes,
                    output="lakes",
                    use="val",
                    overwrite=True)
        run_command("r.null", map="lakes", null=0)
        lakes = "lakes"
        exprmap += '+ if(' + lakes + '>=1, 99999)'

    #morphometric_features==6 -> peaks
    #run_command("r.mapcalc", overwrite=ow,expression='frict_surf_extr = if(morphometric_features==6, 99999) + if(rivers>=1 || lakes>=1, 99999) + if(yield_pix1<=0, 99999) + pix_cross')
    run_command("r.mapcalc", overwrite=ow, expression=exprmap)

    run_command("r.cost",
                overwrite=ow,
                input="frict_surf_extr",
                output="extr_dist",
                stop_points=vector_forest,
                start_rast=forest_roads,
                max_cost=1500)

    run_command("r.slope.aspect",
                overwrite=ow,
                elevation=opts['dtm'],
                slope="slope",
                format="percent")
    run_command("r.mapcalc", overwrite=ow, expression=CCEXTR)
    run_command("r.mapcalc", overwrite=ow, expression=FWEXTR)
    run_command("r.mapcalc", overwrite=ow, expression=OEXTR)
    run_command("r.null", map="cable_crane_extraction", null=0)
    run_command("r.null", map="forwarder_extraction", null=0)
    run_command("r.null", map="other_extraction", null=0)
    run_command(
        "r.mapcalc",
        overwrite=ow,
        expression=
        'technical_surface = cable_crane_extraction + forwarder_extraction + other_extraction'
    )
    # run_command("r.statistics", overwrite=ow,
    #             base="compartment", cover="technical_surface", method="sum",
    #             output="techn_pix_comp")
    # run_command("r.mapcalc", overwrite=ow,
    #             expression='yield_pix2 = yield/(technical_surface*@techn_pix_comp)')
    # run_command("r.null", map="yield_pix2", null=0)
    # run_command("r.mapcalc", overwrite=ow,
    #             expression=YPIX % (1 if flgs['u'] else 0, 0 if flgs['u'] else 1,))

    run_command("r.mapcalc", overwrite=ow, expression="yield_pix=yield_pix1")

    run_command("r.mapcalc", overwrite=ow, expression=EHF)
    run_command("r.mapcalc", overwrite=ow, expression=ECC)
    run_command("r.mapcalc", overwrite=ow, expression=ET)

    with RasterRow(tech_bioenergy) as pT:
        T = np.array(pT)

    print("Resulted maps: " + output + "_tech_bioenergyHF, " + output +
          "_tech_bioenergyC, " + output + "_tech_bioenergy")
    print("Total bioenergy stimated (Mwh): %.2f" % np.nansum(T))

    if flgs['r']:
        remove_map(opts, flgs)
Beispiel #35
0
def main():
    coords = options['coordinates']
    input = options['input']
    output = options['output']
    fs = options['separator']
    proj_in = options['proj_in']
    proj_out = options['proj_out']
    ll_in = flags['i']
    ll_out = flags['o']
    decimal = flags['d']
    copy_input = flags['e']
    include_header = flags['c']

    #### check for cs2cs
    if not grass.find_program('cs2cs'):
	grass.fatal(_("cs2cs program not found, install PROJ.4 first: http://proj.maptools.org"))

    #### check for overenthusiasm
    if proj_in and ll_in:
	grass.fatal(_("Choose only one input parameter method"))

    if proj_out and ll_out:
	grass.fatal(_("Choose only one output parameter method")) 

    if ll_in and ll_out:
	grass.fatal(_("Choise only one auto-projection parameter method"))

    if output and not grass.overwrite() and os.path.exists(output):
	grass.fatal(_("Output file already exists")) 

    if not coords and not input:
        grass.fatal(_("One of <coordinates> and <input> must be given"))
    if coords and input:
        grass.fatal(_("Options <coordinates> and <input> are mutually exclusive"))

    #### parse field separator
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ',':
        ifs = ofs = ','
    else:
	try:
	    ifs, ofs = fs.split(',')
	except ValueError:
	    ifs = ofs = fs

    ifs = separator(ifs)
    ofs = separator(ofs)

    #### set up projection params
    s = grass.read_command("g.proj", flags='j')
    kv = parse_key_val(s)
    if "XY location" in kv['+proj'] and (ll_in or ll_out):
	grass.fatal(_("Unable to project to or from a XY location")) 

    in_proj = None

    if ll_in:
	in_proj = "+proj=longlat +datum=WGS84"
	grass.verbose("Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
	in_proj = grass.read_command('g.proj', flags = 'jf')

    if proj_in:
	in_proj = proj_in

    if not in_proj:
        grass.verbose("Assuming current location as input")
        in_proj = grass.read_command('g.proj', flags='jf')

    in_proj = in_proj.strip()
    grass.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
	out_proj = "+proj=longlat +datum=WGS84"
	grass.verbose("Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
	out_proj = grass.read_command('g.proj', flags = 'jf')

    if proj_out:
	out_proj = proj_out

    if not out_proj:
	grass.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    grass.verbose("Output parameters: '%s'" % out_proj)

    #### set up input file
    if coords:
        x, y = coords.split(',')
        tmpfile = grass.tempfile()
        fd = open(tmpfile, "w")
        fd.write("%s%s%s\n" % (x, ifs, y))
        fd.close()
        inf = open(tmpfile)
    else:
        if input == '-':
            infile = None
            inf = sys.stdin
        else:
            infile = input
            if not os.path.exists(infile):
                grass.fatal(_("Unable to read input data"))
            inf = open(infile)
            grass.debug("input file=[%s]" % infile)
    
    #### set up output file
    if not output:
	outfile = None
	outf = sys.stdout
    else:
	outfile = output
	outf = open(outfile, 'w')
	grass.debug("output file=[%s]" % outfile) 

    #### set up output style
    if not decimal:
	outfmt = ["-w5"]
    else:
	outfmt = ["-f", "%.8f"]
    if not copy_input:
	copyinp = []
    else:
	copyinp = ["-E"]

    #### do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'

    cmd = ['cs2cs'] + copyinp + outfmt + in_proj.split() + ['+to'] + out_proj.split()
    p = grass.Popen(cmd, stdin = grass.PIPE, stdout = grass.PIPE)

    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            try:
                xy, z = line.split(' ', 1)
                x, y = xy.split('\t')
            except ValueError:
                grass.fatal(line)

            outf.write('%s%s%s%s%s\n' %
                       (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = line.split('\t')
            inX, inY = inXYZ.split(' ')[:2]
            y, z = rest.split(' ', 1)
            outf.write('%s%s%s%s%s%s%s%s%s\n' %
                       (inX.strip(), ofs, inY.strip(), ofs, x.strip(),
                        ofs, y.strip(), ofs, z.strip()))

    p.wait()

    if p.returncode != 0:
	grass.warning(_("Projection transform probably failed, please investigate"))
Beispiel #36
0
def main(opts, flgs):
    ow = overwrite()

    output = opts['output_basename']

    forest = opts['forest']
    boundaries = opts['boundaries']
    yield_ = opts['forest_column_yield']
    management = opts['forest_column_management']
    treatment = opts['forest_column_treatment']
    yield_surface = opts['forest_column_yield_surface']

    l_bioenergyHF = output + '_l_bioenergyHF'
    l_bioenergyC = output + '_l_bioenergyC'
    l_bioenergy = output + '_l_bioenergy'

    ######## start import and convert ########

    run_command("g.region", vect=boundaries)
    run_command("v.to.rast",
                input=forest,
                output="yield",
                use="attr",
                attrcolumn=yield_,
                overwrite=True)
    run_command("v.to.rast",
                input=forest,
                output="yield_surface",
                use="attr",
                attrcolumn=yield_surface,
                overwrite=True)
    run_command("v.to.rast",
                input=forest,
                output="treatment",
                use="attr",
                attrcolumn=treatment,
                overwrite=True)
    run_command("v.to.rast",
                input=forest,
                output="management",
                use="attr",
                attrcolumn=management,
                overwrite=True)

    run_command("r.null", map='yield', null=0)
    run_command("r.null", map='yield_surface', null=0)
    run_command("r.null", map='treatment', null=0)
    run_command("r.null", map='management', null=0)

    ######## end import and convert ########

    ######## temp patch to link map and fields ######

    management = "management"
    treatment = "treatment"
    yield_surface = "yield_surface"
    yield_ = "yield"

    ######## end temp patch to link map and fields ######

    #import pdb; pdb.set_trace()
    #treatment=1 final felling, treatment=2 thinning
    ECOHF = l_bioenergyHF + ' = if(' + management + '==1 && ' + treatment + '==1 || ' + management + ' == 1 && ' + treatment + '==99999, yield_pix1*%f, if(' + management + '==1 && ' + treatment + '==2, yield_pix1*%f + yield_pix1*%f))'

    #ECOHF = 'ecological_bioenergyHF = if(management==1 && treatment==1 || management == 1 && treatment==99999,yield_pix1*'+opts['energy_tops_hf']+', if(management==1 && treatment==2, yield_pix1*'+opts['energy_tops_hf']+' + yield_pix1*'+opts['energy_cormometric_vol_hf']+'))'

    ECOCC = (l_bioenergyC + ' = if(' + management + '==2, yield_pix1*' +
             opts['energy_tops_cop'] + ')')

    ECOT = l_bioenergy + ' = (' + l_bioenergyHF + ' + ' + l_bioenergyC + ')'

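    # per-pixel yield: the mapped yield divided by its yield surface, scaled
    # by the cell area in hectares (ewres()*nsres()/10000)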
    run_command("r.mapcalc",
                overwrite=ow,
                expression='yield_pix1 = (' + yield_ + '/' + yield_surface +
                ')*((ewres()*nsres())/10000)')

    run_command("r.mapcalc",
                overwrite=ow,
                expression=ECOHF % tuple(
                    map(float, (opts['energy_tops_hf'], opts['energy_tops_hf'],
                                opts['energy_cormometric_vol_hf']))))

    run_command("r.mapcalc", overwrite=ow, expression=ECOCC)

    run_command("r.mapcalc", overwrite=ow, expression=ECOT)

    with RasterRow(l_bioenergy) as pT:
        T = np.array(pT)

    print("Resulted maps: " + output + "_l_bioenergyHF, " + output +
          "_l_bioenergyC, " + output + "_l_bioenergy")
    print("Total bioenergy stimated (Mwh): %.2f" % np.nansum(T))
Example #37
0
def main():
    options, flags = gcore.parser()

    elevation = options["elevation"]
    strds = options["output"]
    basename = strds
    start_water_level = float(options["start_water_level"])
    end_water_level = float(options["end_water_level"])
    water_level_step = float(options["water_level_step"])
    # if options['coordinates']:
    #    options['coordinates'].split(',')
    # passing coordinates parameter as is
    coordinates = options["coordinates"]
    seed_raster = options["seed_raster"]
    if seed_raster and coordinates:
        gcore.fatal(
            _("Both seed raster and coordinates cannot be specified"
              " together, please specify only one of them."))

    time_unit = options["time_unit"]
    time_step = options["time_step"]  # temporal functions accept only strings for now
    if int(time_step) <= 0:
        gcore.fatal(
            _("Time step must be greater than zero."
              " Please specify number > 0."))

    mapset = gcore.gisenv()["MAPSET"]
    title = _("r.lake series")
    description = _("r.lake series")

    water_levels = list(frange(start_water_level, end_water_level,
                               water_level_step))
    outputs = [
        "%s%s%s" % (basename, "_", water_level) for water_level in water_levels
    ]

    if not gcore.overwrite():
        check_maps_exist(outputs, mapset)

    kwargs = {}
    if seed_raster:
        kwargs["seed"] = seed_raster
    elif coordinates:
        kwargs["coordinates"] = coordinates

    if flags["n"]:
        pass_flags = "n"
    else:
        pass_flags = None

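    # run r.lake once per water level; on failure, remove the maps created so far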
    for i, water_level in enumerate(water_levels):
        try:
            gcore.run_command(
                "r.lake",
                flags=pass_flags,
                elevation=elevation,
                lake=outputs[i],
                water_level=water_level,
                overwrite=gcore.overwrite(),
                # TODO: does this really work? It seems that hardcoding False
                # here does not prevent overwriting.
                **kwargs)
        except CalledModuleError:
            # remove maps created so far, try to remove also i-th map
            remove_raster_maps(outputs[:i], quiet=True)
            gcore.fatal(
                _("r.lake command failed. Check above error messages."
                  " Try different water levels or seed points."))
    gcore.info(_("Registering created maps into temporal dataset..."))

    # Make sure the temporal database exists
    tgis.init()

    tgis.open_new_stds(
        strds,
        type="strds",
        temporaltype="relative",
        title=title,
        descr=description,
        semantic="sum",
        dbif=None,
        overwrite=gcore.overwrite(),
    )
    # TODO: we must start from 1 because there is a bug in register_maps_in_space_time_dataset
    tgis.register_maps_in_space_time_dataset(
        type="raster",
        name=basename,
        maps=",".join(outputs),
        start=str(1),
        end=None,
        unit=time_unit,
        increment=time_step,
        interval=False,
        dbif=None,
    )
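
The water levels above come from a frange helper that is not part of this excerpt. A minimal sketch of such a generator follows; whether the real helper includes the end value or guards against floating point drift is an assumption here.

def frange(start, stop, step):
    # Minimal sketch of the frange helper used above: yields floats from
    # start up to (approximately) stop in increments of step.
    value = start
    while value <= stop:
        yield value
        value += step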