Example no. 1
def main():
    options, flags = gcore.parser()

    raster = options['raster']
    raster_3d = options['raster_3d']
    vector = options['vector']
    sep = separator(options['separator'])

    perform_pre_checks = not flags['s']
    dry_run = flags['d']

    if perform_pre_checks:
        if raster:
            check_file(raster, 'raster', sep=sep)
        if raster_3d:
            check_file(raster_3d, 'raster_3d', sep=sep)
        if vector:
            check_file(vector, 'vector', sep=sep)

    if dry_run:
        gcore.message(_("Checks successful"))
        return

    if raster:
        rename_from_file(filename=raster, map_type='raster', sep=sep,
                         safe_input=perform_pre_checks)
    if raster_3d:
        rename_from_file(filename=raster_3d, map_type='raster_3d', sep=sep,
                         safe_input=perform_pre_checks)
    if vector:
        rename_from_file(filename=vector, map_type='vector', sep=sep,
                         safe_input=perform_pre_checks)
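
Note: check_file and rename_from_file are helpers defined elsewhere in this script. A minimal sketch of what rename_from_file might look like, assuming each non-empty line of the file holds an old and a new map name joined by sep (g.rename and its raster/raster_3d/vector parameters are real; the parsing details and the omitted safe_input handling are assumptions):

from grass.script import core as gcore

def rename_from_file(filename, map_type, sep, safe_input=True):
    # each non-empty line is expected to read: old_name<sep>new_name
    with open(filename) as names_file:
        for line in names_file:
            line = line.strip()
            if not line:
                continue
            old_name, new_name = line.split(sep)
            # g.rename takes the map type as the parameter name,
            # e.g. g.rename raster=old,new
            gcore.run_command('g.rename', **{map_type: (old_name, new_name)})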
Example no. 2
def main():
    options, flags = gcore.parser()

    raster = options["raster"]
    raster_3d = options["raster_3d"]
    vector = options["vector"]
    sep = separator(options["separator"])

    perform_pre_checks = not flags["s"]
    dry_run = flags["d"]

    if perform_pre_checks:
        if raster:
            check_file(raster, "raster", sep=sep)
        if raster_3d:
            check_file(raster_3d, "raster_3d", sep=sep)
        if vector:
            check_file(vector, "vector", sep=sep)

    if dry_run:
        gcore.message(_("Checks successful"))
        return

    if raster:
        rename_from_file(filename=raster,
                         map_type="raster",
                         sep=sep,
                         safe_input=perform_pre_checks)
    if raster_3d:
        rename_from_file(
            filename=raster_3d,
            map_type="raster_3d",
            sep=sep,
            safe_input=perform_pre_checks,
        )
    if vector:
        rename_from_file(filename=vector,
                         map_type="vector",
                         sep=sep,
                         safe_input=perform_pre_checks)
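
Note: check_file is the pre-flight counterpart of rename_from_file. A plausible minimal sketch, assuming it merely validates that every non-empty line holds exactly two names (the real check may also inspect the maps themselves):

from grass.script import core as gcore

def check_file(filename, map_type, sep):
    # fail early if any line does not hold exactly two names
    with open(filename) as names_file:
        for num, line in enumerate(names_file, start=1):
            line = line.strip()
            if not line:
                continue
            if len(line.split(sep)) != 2:
                gcore.fatal("Line %d of <%s> (%s): expected two names"
                            " separated by '%s'" % (num, filename, map_type, sep))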
Example no. 3
def main():
    mapname = options["map"]
    layer = options["layer"]
    option = options["option"]
    units = options["units"]
    sort = options["sort"]
    fs = separator(options["separator"])

    nuldev = open(os.devnull, "w")

    if not grass.find_file(mapname, "vector")["file"]:
        grass.fatal(_("Vector map <%s> not found") % mapname)

    if int(layer) in grass.vector_db(mapname):
        colnames = grass.vector_columns(mapname,
                                        layer,
                                        getDict=False,
                                        stderr=nuldev)
        isConnection = True
    else:
        isConnection = False
        colnames = ["cat"]

    if option == "coor":
        extracolnames = ["x", "y", "z"]
    else:
        extracolnames = [option]

    if units == "percent":
        unitsp = "meters"
    elif units:
        unitsp = units
    else:
        unitsp = None

    # NOTE: we suppress -1 cat and 0 cat
    if isConnection:
        f = grass.vector_db(map=mapname)[int(layer)]
        p = grass.pipe_command("v.db.select",
                               flags="e",
                               quiet=True,
                               map=mapname,
                               layer=layer)
        records1 = []
        catcol = -1
        ncols = 0
        for line in p.stdout:
            cols = decode(line).rstrip("\r\n").split("|")
            if catcol == -1:
                ncols = len(cols)
                for i in range(0, ncols):
                    if cols[i] == f["key"]:
                        catcol = i
                        break
                if catcol == -1:
                    grass.fatal(
                        _("There is a table connected to input vector map '%s', but "
                          "there is no key column '%s'.") %
                        (mapname, f["key"]))
                continue
            if cols[catcol] == "-1" or cols[catcol] == "0":
                continue
            records1.append(cols[:catcol] + [int(cols[catcol])] +
                            cols[(catcol + 1):])
        p.wait()
        if p.returncode != 0:
            sys.exit(1)

        records1.sort(key=lambda r: r[catcol])

        if len(records1) == 0:
            try:
                grass.fatal(
                    _("There is a table connected to input vector map '%s', but "
                      "there are no categories present in the key column '%s'. Consider using "
                      "v.to.db to correct this.") % (mapname, f["key"]))
            except KeyError:
                pass

        # fetch the requested attribute sorted by cat:
        p = grass.pipe_command(
            "v.to.db",
            flags="p",
            quiet=True,
            map=mapname,
            option=option,
            layer=layer,
            units=unitsp,
        )
        records2 = []
        for line in p.stdout:
            fields = decode(line).rstrip("\r\n").split("|")
            if fields[0] in ["cat", "-1", "0"]:
                continue
            records2.append([int(fields[0])] + fields[1:])
        p.wait()
        records2.sort()

        # make pre-table
        # len(records1) may not be the same as len(records2) because
        # v.db.select can return attributes that are not linked to features.
        records3 = []
        for r2 in records2:
            rec = list(filter(lambda r1: r1[catcol] == r2[0], records1))
            if len(rec) > 0:
                res = rec[0] + r2[1:]
            elif flags["d"]:
                res = [r2[0]] + [""] * (ncols - 1) + r2[1:]
            else:
                continue
            records3.append(res)
    else:
        catcol = 0
        records1 = []
        p = grass.pipe_command("v.category",
                               inp=mapname,
                               layer=layer,
                               option="print")
        for line in p.stdout:
            field = int(decode(line).rstrip())
            if field > 0:
                records1.append(field)
        p.wait()
        records1.sort()
        records1 = uniq(records1)

        # make pre-table
        p = grass.pipe_command(
            "v.to.db",
            flags="p",
            quiet=True,
            map=mapname,
            option=option,
            layer=layer,
            units=unitsp,
        )
        records3 = []
        for line in p.stdout:
            fields = decode(line).rstrip("\r\n").split("|")
            if fields[0] in ["cat", "-1", "0"]:
                continue
            records3.append([int(fields[0])] + fields[1:])
        p.wait()
        records3.sort()

    # print table header
    if not flags["c"]:
        sys.stdout.write(fs.join(colnames + extracolnames) + "\n")

    # make and print the table:
    numcols = len(colnames) + len(extracolnames)

    # calculate percents if requested
    if units == "percent" and option != "coor":
        # calculate total value
        total = 0
        for r in records3:
            total += float(r[-1])

        # calculate percentages
        records4 = [float(r[-1]) * 100 / total for r in records3]
        if type(records1[0]) == int:
            records3 = [[r1] + [r4] for r1, r4 in zip(records1, records4)]
        else:
            records3 = [r1 + [r4] for r1, r4 in zip(records1, records4)]

    # sort results
    if sort:
        if sort == "asc":
            if option == "coor":
                records3.sort(
                    key=lambda r: (float(r[-3]), float(r[-2]), float(r[-1])))
            else:
                records3.sort(key=lambda r: float(r[-1]))
        else:
            if option == "coor":
                records3.sort(
                    key=lambda r: (float(r[-3]), float(r[-2]), float(r[-1])),
                    reverse=True,
                )
            else:
                records3.sort(key=lambda r: float(r[-1]), reverse=True)

    for r in records3:
        sys.stdout.write(fs.join(map(str, r)) + "\n")
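
Note: uniq is a small helper used above to deduplicate the sorted category list. A minimal sketch, assuming the input list is already sorted:

def uniq(items):
    # collapse runs of equal values in an already-sorted list
    result = []
    last = None
    for item in items:
        if item != last:
            result.append(item)
            last = item
    return result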
Example no. 4
def main():
    vinput = options['input']
    columns = options['columns'].split(',')
    binary = options['developed_column']
    level = options['subregions_column']
    sep = gutils.separator(options['separator'])
    minim = int(options['min_variables'])
    dredge = flags['d']
    if options['max_variables']:
        maxv = int(options['max_variables'])
    else:
        maxv = len(columns)
    if dredge and minim > maxv:
        gscript.fatal(_("Minimum number of predictor variables is larger than maximum number"))

    global TMP_CSV, TMP_RSCRIPT, TMP_POT
    TMP_CSV = gscript.tempfile(create=False) + '.csv'
    TMP_RSCRIPT = gscript.tempfile()
    include_level = True
    distinct = gscript.read_command('v.db.select', flags='c', map=vinput,
                                    columns="distinct {level}".format(level=level)).strip()
    if len(distinct.splitlines()) <= 1:
        include_level = False
        single_level = distinct.splitlines()[0]
    with open(TMP_RSCRIPT, 'w') as f:
        f.write(rscript)
    TMP_POT = gscript.tempfile(create=False) + '_potential.csv'
    columns += [binary]
    if include_level:
        columns += [level]
    where = "{c} IS NOT NULL".format(c=columns[0])
    for c in columns[1:]:
        where += " AND {c} IS NOT NULL".format(c=c)
    gscript.run_command('v.db.select', map=vinput, columns=columns, separator='comma', where=where, file=TMP_CSV)

    if dredge:
        gscript.info(_("Running automatic model selection ..."))
    else:
        gscript.info(_("Computing model..."))

    cmd = ['Rscript', TMP_RSCRIPT, '-i', TMP_CSV, '-r', binary,
           '-m', str(minim), '-x', str(maxv), '-o', TMP_POT, '-d', 'TRUE' if dredge else 'FALSE']
    if include_level:
        cmd += ['-l', level]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if stderr:
        gscript.warning(gscript.decode(stderr))
    if p.returncode != 0:
        gscript.fatal(_("Running R script failed, check messages above"))

    gscript.info(_("Best model summary:"))
    gscript.info("-------------------------")
    gscript.message(gscript.decode(stdout))

    with open(TMP_POT, 'r') as fin, open(options['output'], 'w') as fout:
        i = 0
        for line in fin.readlines():
            row = line.strip().split('\t')
            row = [each.strip('"') for each in row]
            if i == 0:
                row[0] = "ID"
                row[1] = "Intercept"
            if i == 1 and not include_level:
                row[0] = single_level
            fout.write(sep.join(row))
            fout.write('\n')
            i += 1
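
Note: TMP_CSV, TMP_RSCRIPT and TMP_POT are declared global so that a cleanup handler can remove them when the script exits. The handler itself is not shown in the example; a minimal sketch, assuming it is registered with atexit:

import atexit
import os

TMP_CSV = TMP_RSCRIPT = TMP_POT = None

def cleanup():
    # remove the temporary files created by main(), if any
    for path in (TMP_CSV, TMP_RSCRIPT, TMP_POT):
        if path and os.path.exists(path):
            os.remove(path)

atexit.register(cleanup)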
Example no. 5
def main():
    global tmp

    fs = separator(options['separator'])
    threeD = flags['z']

    prog = 'v.in.lines'

    if threeD:
        do3D = 'z'
    else:
        do3D = ''


    tmp = grass.tempfile()

    #### set up input file
    if options['input'] == '-':
        infile = None
        inf = sys.stdin
    else:
        infile = options['input']
        if not os.path.exists(infile):
            grass.fatal(_("Unable to read input file <%s>") % infile)
        grass.debug("input file=[%s]" % infile)

    if not infile:
        # read from stdin and write to tmpfile (v.in.mapgen wants a real file)
        outf = open(tmp, 'w')
        for line in inf:
            if len(line.lstrip()) == 0 or line[0] == '#':
                continue
            outf.write(line.replace(fs, ' '))

        outf.close()
        runfile = tmp
    else:
        # read from a real file
        if fs == ' ':
            runfile = infile
        else:
            inf = open(infile)
            outf = open(tmp, 'w')

            for line in inf:
                if len(line.lstrip()) == 0 or line[0] == '#':
                    continue
                outf.write(line.replace(fs, ' '))

            inf.close()
            outf.close()
            runfile = tmp

    #### check that there are at least two columns (three if -z is given)
    inf = open(runfile)
    for line in inf:
        if len(line.lstrip()) == 0 or line[0] == '#':
            continue
        numcols = len(line.split())
        break
    inf.close()
    if (do3D and numcols < 3) or (not do3D and numcols < 2):
        grass.fatal(_("Not enough data columns. (incorrect fs setting?)"))


    grass.run_command('v.in.mapgen', flags = 'f' + do3D,
                      input = runfile, output = options['output'])
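
Note: tmp is declared global so the temporary file can be removed on exit. A minimal cleanup sketch, assuming grass.script is imported as grass (try_remove is part of grass.script) and the handler is registered with atexit:

import atexit
import grass.script as grass

tmp = None

def cleanup():
    # silently remove the temporary file, if it was created
    if tmp:
        grass.try_remove(tmp)

atexit.register(cleanup)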
Example no. 6
def main():
    coords = options["coordinates"]
    input = options["input"]
    output = options["output"]
    fs = options["separator"]
    proj_in = options["proj_in"]
    proj_out = options["proj_out"]
    ll_in = flags["i"]
    ll_out = flags["o"]
    decimal = flags["d"]
    copy_input = flags["e"]
    include_header = flags["c"]

    # check for cs2cs
    if not gcore.find_program("cs2cs"):
        gcore.fatal(
            _("cs2cs program not found, install PROJ first: \
            https://proj.org"))

    # parse field separator
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ",":
        ifs = ofs = ","
    else:
        try:
            ifs, ofs = fs.split(",")
        except ValueError:
            ifs = ofs = fs

    ifs = separator(ifs)
    ofs = separator(ofs)

    # set up projection params
    s = gcore.read_command("g.proj", flags="j")
    kv = parse_key_val(s)
    if "XY location" in kv["+proj"] and (ll_in or ll_out):
        gcore.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = gcore.read_command("g.proj", flags="jf")

    if proj_in:
        if "+" in proj_in:
            in_proj = proj_in
        else:
            gcore.fatal(_("Invalid PROJ.4 input specification"))

    if not in_proj:
        gcore.verbose("Assuming current location as input")
        in_proj = gcore.read_command("g.proj", flags="jf")

    in_proj = in_proj.strip()
    gcore.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
        out_proj = gcore.read_command("g.proj", flags="jf")

    if proj_out:
        if "+" in proj_out:
            out_proj = proj_out
        else:
            gcore.fatal(_("Invalid PROJ.4 output specification"))

    if not out_proj:
        gcore.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    gcore.verbose("Output parameters: '%s'" % out_proj)

    # set up input file
    if coords:
        x, y = coords.split(",")
        tmpfile = gcore.tempfile()
        fd = open(tmpfile, "w")
        fd.write("%s%s%s\n" % (x, ifs, y))
        fd.close()
        inf = open(tmpfile)
    else:
        if input == "-":
            infile = None
            inf = sys.stdin
        else:
            infile = input
            if not os.path.exists(infile):
                gcore.fatal(_("Unable to read input data"))
            inf = open(infile)
            gcore.debug("input file=[%s]" % infile)

    # set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, "w")
        gcore.debug("output file=[%s]" % outfile)

    # set up output style
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    # do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'

    cmd = ["cs2cs"
           ] + copyinp + outfmt + in_proj.split() + ["+to"] + out_proj.split()

    p = gcore.Popen(cmd, stdin=gcore.PIPE, stdout=gcore.PIPE)

    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            try:
                xy, z = decode(line).split(" ", 1)
                x, y = xy.split("\t")
            except ValueError:
                gcore.fatal(line)

            outf.write("%s%s%s%s%s\n" %
                       (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = decode(line).split("\t")
            inX, inY = inXYZ.split(" ")[:2]
            y, z = rest.split(" ", 1)
            outf.write("%s%s%s%s%s%s%s%s%s\n" % (
                inX.strip(),
                ofs,
                inY.strip(),
                ofs,
                x.strip(),
                ofs,
                y.strip(),
                ofs,
                z.strip(),
            ))

    p.wait()

    if p.returncode != 0:
        gcore.warning(
            _("Projection transform probably failed, please investigate"))
Example no. 7
def main():
    vinput = options["input"]
    columns = options["columns"].split(",")
    binary = options["developed_column"]
    level = options["subregions_column"]
    sep = gutils.separator(options["separator"])
    minim = int(options["min_variables"])
    dredge = flags["d"]
    if options["max_variables"]:
        maxv = int(options["max_variables"])
    else:
        maxv = len(columns)
    if dredge and minim > maxv:
        gscript.fatal(
            _("Minimum number of predictor variables is larger than maximum number"))

    global TMP_CSV, TMP_RSCRIPT, TMP_POT
    TMP_CSV = gscript.tempfile(create=False) + ".csv"
    TMP_RSCRIPT = gscript.tempfile()
    include_level = True
    distinct = gscript.read_command(
        "v.db.select",
        flags="c",
        map=vinput,
        columns="distinct {level}".format(level=level),
    ).strip()
    if len(distinct.splitlines()) <= 1:
        include_level = False
        single_level = distinct.splitlines()[0]
    with open(TMP_RSCRIPT, "w") as f:
        f.write(rscript)
    TMP_POT = gscript.tempfile(create=False) + "_potential.csv"
    columns += [binary]
    if include_level:
        columns += [level]
    where = "{c} IS NOT NULL".format(c=columns[0])
    for c in columns[1:]:
        where += " AND {c} IS NOT NULL".format(c=c)
    gscript.run_command(
        "v.db.select",
        map=vinput,
        columns=columns,
        separator="comma",
        where=where,
        file=TMP_CSV,
    )

    if dredge:
        gscript.info(_("Running automatic model selection ..."))
    else:
        gscript.info(_("Computing model..."))

    cmd = [
        "Rscript",
        TMP_RSCRIPT,
        "-i",
        TMP_CSV,
        "-r",
        binary,
        "-m",
        str(minim),
        "-x",
        str(maxv),
        "-o",
        TMP_POT,
        "-d",
        "TRUE" if dredge else "FALSE",
    ]
    if include_level:
        cmd += ["-l", level]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if stderr:
        gscript.warning(gscript.decode(stderr))
    if p.returncode != 0:
        gscript.fatal(_("Running R script failed, check messages above"))

    gscript.info(_("Best model summary:"))
    gscript.info("-------------------------")
    gscript.message(gscript.decode(stdout))

    with open(TMP_POT, "r") as fin, open(options["output"], "w") as fout:
        i = 0
        for line in fin.readlines():
            row = line.strip().split("\t")
            row = [each.strip('"') for each in row]
            if i == 0:
                row[0] = "ID"
                row[1] = "Intercept"
            if i == 1 and not include_level:
                row[0] = single_level
            fout.write(sep.join(row))
            fout.write("\n")
            i += 1
Example no. 8
def main():
    # Take into account those extra pixels we'll be adding
    max_cols = int(options['maxcols']) - int(options['overlap'])
    max_rows = int(options['maxrows']) - int(options['overlap'])

    if max_cols == 0:
        gcore.fatal(
            _("It is not possible to set 'maxcols=%s' and "
              "'overlap=%s'. Please set maxcols>overlap") %
            (options['maxcols'], options['overlap']))
    elif max_rows == 0:
        gcore.fatal(
            _("It is not possible to set 'maxrows=%s' and "
              "'overlap=%s'. Please set maxrows>overlap") %
            (options['maxrows'], options['overlap']))
    # destination projection
    if not options['destproj']:
        dest_proj = gcore.read_command('g.proj', quiet=True,
                                       flags='jf').rstrip('\n')
        if not dest_proj:
            gcore.fatal(_('g.proj failed'))
    else:
        dest_proj = options['destproj']
    gcore.debug("Getting destination projection -> '%s'" % dest_proj)

    # projection scale
    if not options['destscale']:
        ret = gcore.parse_command('g.proj', quiet=True, flags='j')
        if not ret:
            gcore.fatal(_('g.proj failed'))

        if '+to_meter' in ret:
            dest_scale = ret['+to_meter'].strip()
        else:
            gcore.warning(
                _("Scale (%s) not found, assuming '1'") % '+to_meter')
            dest_scale = '1'
    else:
        dest_scale = options['destscale']
    gcore.debug('Getting destination projection scale -> %s' % dest_scale)

    # set up the projections
    srs_source = {
        'proj': options['sourceproj'],
        'scale': float(options['sourcescale'])
    }
    srs_dest = {'proj': dest_proj, 'scale': float(dest_scale)}

    if options['region']:
        gcore.run_command('g.region', quiet=True, region=options['region'])
    dest_bbox = gcore.region()
    gcore.debug('Getting destination region')

    # output field separator
    fs = separator(options['separator'])

    # project the destination region into the source:
    gcore.verbose('Projecting destination region into source...')
    dest_bbox_points = bboxToPoints(dest_bbox)

    dest_bbox_source_points, errors_dest = projectPoints(dest_bbox_points,
                                                         source=srs_dest,
                                                         dest=srs_source)

    if len(dest_bbox_source_points) == 0:
        gcore.fatal(
            _("There are no tiles available. Probably the output "
              "projection system is not compatible with the "
              "projection of the current location"))

    source_bbox = pointsToBbox(dest_bbox_source_points)

    gcore.verbose('Projecting source bounding box into destination...')

    source_bbox_points = bboxToPoints(source_bbox)

    source_bbox_dest_points, errors_source = projectPoints(source_bbox_points,
                                                           source=srs_source,
                                                           dest=srs_dest)

    x_metric = 1 / dest_bbox['ewres']
    y_metric = 1 / dest_bbox['nsres']

    gcore.verbose('Computing length of sides of source bounding box...')

    source_bbox_dest_lengths = sideLengths(source_bbox_dest_points, x_metric,
                                           y_metric)

    # Find the skewness of the two directions.
    # Define it to be greater than one
    # In the direction (x or y) in which the world is least skewed (i.e. north-south in lat-long)
    # Divide the world into strips. These strips are as big as possible constrained by max_
    # In the other direction do the same thing.
    # There's some recomputation of the size of the world that's got to come in
    # here somewhere.

    # For now, however, we are going to go ahead and request more data than is necessary.
    # For small regions far from the critical areas of projections this makes very little difference
    # in the amount of data gotten.
    # We can make this efficient for big regions or regions near critical
    # points later.

    bigger = []
    bigger.append(max(source_bbox_dest_lengths['x']))
    bigger.append(max(source_bbox_dest_lengths['y']))
    maxdim = (max_cols, max_rows)

    # Compute the number and size of tiles to use in each direction
    # I'm making fairly even sized tiles
    # They differ from each other in height and width only by one cell
    # I'm going to make the numbers all simpler and add this extra cell to
    # every tile.

    gcore.message(_('Computing tiling...'))
    tiles = [-1, -1]
    tile_base_size = [-1, -1]
    tiles_extra_1 = [-1, -1]
    tile_size = [-1, -1]
    tileset_size = [-1, -1]
    tile_size_overlap = [-1, -1]
    for i in range(len(bigger)):
        # make these into integers.
        # round up
        bigger[i] = int(bigger[i] + 1)
        tiles[i] = int((bigger[i] / maxdim[i]) + 1)
        tile_size[i] = tile_base_size[i] = int(bigger[i] / tiles[i])
        tiles_extra_1[i] = int(bigger[i] % tiles[i])
        # This is adding the extra pixel (remainder) to all of the tiles:
        if tiles_extra_1[i] > 0:
            tile_size[i] = tile_base_size[i] + 1
        tileset_size[i] = int(tile_size[i] * tiles[i])
        # Add overlap to tiles (doesn't affect tileset_size)
        tile_size_overlap[i] = tile_size[i] + int(options['overlap'])

    gcore.verbose("There will be %d by %d tiles each %d by %d cells" %
                  (tiles[0], tiles[1], tile_size[0], tile_size[1]))

    ximax = tiles[0]
    yimax = tiles[1]

    min_x = source_bbox['w']
    min_y = source_bbox['s']
    max_x = source_bbox['e']
    max_y = source_bbox['n']
    span_x = (max_x - min_x)
    span_y = (max_y - min_y)

    xi = 0
    tile_bbox = {'w': -1, 's': -1, 'e': -1, 'n': -1}

    if errors_dest > 0:
        gcore.warning(
            _("During computation %i tiles could not be created") %
            errors_dest)

    while xi < ximax:
        tile_bbox['w'] = float(min_x) + (float(xi) * float(
            tile_size[0]) / float(tileset_size[0])) * float(span_x)
        tile_bbox['e'] = float(min_x) + (float(xi + 1) * float(
            tile_size_overlap[0]) / float(tileset_size[0])) * float(span_x)
        yi = 0
        while yi < yimax:
            tile_bbox['s'] = float(min_y) + (float(yi) * float(
                tile_size[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox['n'] = float(min_y) + (float(yi + 1) * float(
                tile_size_overlap[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox_points = bboxToPoints(tile_bbox)
            tile_dest_bbox_points, errors = projectPoints(tile_bbox_points,
                                                          source=srs_source,
                                                          dest=srs_dest)
            tile_dest_bbox = pointsToBbox(tile_dest_bbox_points)
            if bboxesIntersect(tile_dest_bbox, dest_bbox):
                if flags['w']:
                    print("bbox=%s,%s,%s,%s&width=%s&height=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                elif flags['g']:
                    print("w=%s;s=%s;e=%s;n=%s;cols=%s;rows=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                else:
                    print("%s%s%s%s%s%s%s%s%s%s%s" %
                          (tile_bbox['w'], fs, tile_bbox['s'], fs,
                           tile_bbox['e'], fs, tile_bbox['n'], fs,
                           tile_size_overlap[0], fs, tile_size_overlap[1]))
            yi += 1
        xi += 1
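
Note: bboxToPoints and pointsToBbox convert between the bounding-box dictionaries used above and plain corner coordinates. A minimal sketch of the pair, assuming the four corners are enough for the projection step (the real helpers may sample the box edges more densely):

def bboxToPoints(bbox):
    # the four corners of the box as (x, y) tuples
    return [
        (bbox['w'], bbox['s']),
        (bbox['w'], bbox['n']),
        (bbox['e'], bbox['n']),
        (bbox['e'], bbox['s']),
    ]

def pointsToBbox(points):
    # the smallest box containing all the points
    xs = [p[0] for p in points]
    ys = [p[1] for p in points]
    return {'w': min(xs), 's': min(ys), 'e': max(xs), 'n': max(ys)}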
Example no. 9
def main():
    # Take into account those extra pixels we'll be adding
    max_cols = int(options['maxcols']) - int(options['overlap'])
    max_rows = int(options['maxrows']) - int(options['overlap'])

    if max_cols == 0:
        gcore.fatal(_("It is not possibile to set 'maxcols=%s' and "
                      "'overlap=%s'. Please set maxcols>overlap" %
                      (options['maxcols'], options['overlap'])))
    elif max_rows == 0:
        gcore.fatal(_("It is not possibile to set 'maxrows=%s' and "
                      "'overlap=%s'. Please set maxrows>overlap" %
                      (options['maxrows'], options['overlap'])))
    # destination projection
    if not options['destproj']:
        dest_proj = gcore.read_command('g.proj',
                                       quiet=True,
                                       flags='jf').rstrip('\n')
        if not dest_proj:
            gcore.fatal(_('g.proj failed'))
    else:
        dest_proj = options['destproj']
    gcore.debug("Getting destination projection -> '%s'" % dest_proj)

    # projection scale
    if not options['destscale']:
        ret = gcore.parse_command('g.proj',
                                  quiet=True,
                                  flags='j')
        if not ret:
            gcore.fatal(_('g.proj failed'))

        if '+to_meter' in ret:
            dest_scale = ret['+to_meter'].strip()
        else:
            gcore.warning(
                _("Scale (%s) not found, assuming '1'") %
                '+to_meter')
            dest_scale = '1'
    else:
        dest_scale = options['destscale']
    gcore.debug('Getting destination projection scale -> %s' % dest_scale)

    # set up the projections
    srs_source = {'proj': options['sourceproj'],
                  'scale': float(options['sourcescale'])}
    srs_dest = {'proj': dest_proj, 'scale': float(dest_scale)}

    if options['region']:
        gcore.run_command('g.region',
                          quiet=True,
                          region=options['region'])
    dest_bbox = gcore.region()
    gcore.debug('Getting destination region')

    # output field separator
    fs = separator(options['separator'])

    # project the destination region into the source:
    gcore.verbose('Projecting destination region into source...')
    dest_bbox_points = bboxToPoints(dest_bbox)

    dest_bbox_source_points, errors_dest = projectPoints(dest_bbox_points,
                                                         source=srs_dest,
                                                         dest=srs_source)

    if len(dest_bbox_source_points) == 0:
        gcore.fatal(_("There are no tiles available. Probably the output "
                      "projection system it is not compatible with the "
                      "projection of the current location"))

    source_bbox = pointsToBbox(dest_bbox_source_points)

    gcore.verbose('Projecting source bounding box into destination...')

    source_bbox_points = bboxToPoints(source_bbox)

    source_bbox_dest_points, errors_source = projectPoints(source_bbox_points,
                                                           source=srs_source,
                                                           dest=srs_dest)

    x_metric = 1 / dest_bbox['ewres']
    y_metric = 1 / dest_bbox['nsres']

    gcore.verbose('Computing length of sides of source bounding box...')

    source_bbox_dest_lengths = sideLengths(source_bbox_dest_points,
                                           x_metric, y_metric)

    # Find the skewness of the two directions.
    # Define it to be greater than one
    # In the direction (x or y) in which the world is least skewed (i.e. north-south in lat-long)
    # Divide the world into strips. These strips are as big as possible constrained by max_
    # In the other direction do the same thing.
    # There's some recomputation of the size of the world that's got to come in
    # here somewhere.

    # For now, however, we are going to go ahead and request more data than is necessary.
    # For small regions far from the critical areas of projections this makes very little difference
    # in the amount of data gotten.
    # We can make this efficient for big regions or regions near critical
    # points later.

    bigger = []
    bigger.append(max(source_bbox_dest_lengths['x']))
    bigger.append(max(source_bbox_dest_lengths['y']))
    maxdim = (max_cols, max_rows)

    # Compute the number and size of tiles to use in each direction
    # I'm making fairly even sized tiles
    # They differ from each other in height and width only by one cell
    # I'm going to make the numbers all simpler and add this extra cell to
    # every tile.

    gcore.message(_('Computing tiling...'))
    tiles = [-1, -1]
    tile_base_size = [-1, -1]
    tiles_extra_1 = [-1, -1]
    tile_size = [-1, -1]
    tileset_size = [-1, -1]
    tile_size_overlap = [-1, -1]
    for i in range(len(bigger)):
        # make these into integers.
        # round up
        bigger[i] = int(bigger[i] + 1)
        tiles[i] = int((bigger[i] / maxdim[i]) + 1)
        tile_size[i] = tile_base_size[i] = int(bigger[i] / tiles[i])
        tiles_extra_1[i] = int(bigger[i] % tiles[i])
        # This is adding the extra pixel (remainder) to all of the tiles:
        if tiles_extra_1[i] > 0:
            tile_size[i] = tile_base_size[i] + 1
        tileset_size[i] = int(tile_size[i] * tiles[i])
        # Add overlap to tiles (doesn't affect tileset_size)
        tile_size_overlap[i] = tile_size[i] + int(options['overlap'])

    gcore.verbose("There will be %d by %d tiles each %d by %d cells" %
                  (tiles[0], tiles[1], tile_size[0], tile_size[1]))

    ximax = tiles[0]
    yimax = tiles[1]

    min_x = source_bbox['w']
    min_y = source_bbox['s']
    max_x = source_bbox['e']
    max_y = source_bbox['n']
    span_x = (max_x - min_x)
    span_y = (max_y - min_y)

    xi = 0
    tile_bbox = {'w': -1, 's': -1, 'e': -1, 'n': -1}

    if errors_dest > 0:
        gcore.warning(_("During computation %i tiles could not be created" %
                        errors_dest))

    while xi < ximax:
        tile_bbox['w'] = float(min_x) + (float(xi) * float(
            tile_size[0]) / float(tileset_size[0])) * float(span_x)
        tile_bbox['e'] = float(min_x) + (float(xi + 1) * float(
            tile_size_overlap[0]) / float(tileset_size[0])) * float(span_x)
        yi = 0
        while yi < yimax:
            tile_bbox['s'] = float(min_y) + (float(yi) * float(
                tile_size[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox['n'] = float(min_y) + (float(yi + 1) * float(
                tile_size_overlap[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox_points = bboxToPoints(tile_bbox)
            tile_dest_bbox_points, errors = projectPoints(tile_bbox_points,
                                                          source=srs_source,
                                                          dest=srs_dest)
            tile_dest_bbox = pointsToBbox(tile_dest_bbox_points)
            if bboxesIntersect(tile_dest_bbox, dest_bbox):
                if flags['w']:
                    print("bbox=%s,%s,%s,%s&width=%s&height=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                elif flags['g']:
                    print("w=%s;s=%s;e=%s;n=%s;cols=%s;rows=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                else:
                    print("%s%s%s%s%s%s%s%s%s%s%s" %
                          (tile_bbox['w'], fs, tile_bbox['s'], fs,
                           tile_bbox['e'], fs, tile_bbox['n'], fs,
                           tile_size_overlap[0], fs, tile_size_overlap[1]))
            yi += 1
        xi += 1
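
Note: bboxesIntersect decides whether a candidate tile overlaps the requested region. A minimal sketch for axis-aligned boxes in the same coordinate system (an assumption; the real test may treat degenerate boxes differently):

def bboxesIntersect(a, b):
    # two axis-aligned boxes overlap iff they overlap on both axes
    return (a['w'] <= b['e'] and b['w'] <= a['e'] and
            a['s'] <= b['n'] and b['s'] <= a['n'])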
Example no. 10
def main():
    global tmp

    fs = separator(options["separator"])
    threeD = flags["z"]

    if threeD:
        do3D = "z"
    else:
        do3D = ""

    tmp = grass.tempfile()

    # set up input file
    if options["input"] == "-":
        infile = None
        inf = sys.stdin
    else:
        infile = options["input"]
        if not os.path.exists(infile):
            grass.fatal(_("Unable to read input file <%s>") % infile)
        grass.debug("input file=[%s]" % infile)

    if not infile:
        # read from stdin and write to tmpfile (v.in.mapgen wants a real file)
        outf = open(tmp, "w")
        for line in inf:
            if len(line.lstrip()) == 0 or line[0] == "#":
                continue
            outf.write(line.replace(fs, " "))

        outf.close()
        runfile = tmp
    else:
        # read from a real file
        if fs == " ":
            runfile = infile
        else:
            inf = open(infile)
            outf = open(tmp, "w")

            for line in inf:
                if len(line.lstrip()) == 0 or line[0] == "#":
                    continue
                outf.write(line.replace(fs, " "))

            inf.close()
            outf.close()
            runfile = tmp

    # check that there are at least two columns (three if -z is given)
    inf = open(runfile)
    for line in inf:
        if len(line.lstrip()) == 0 or line[0] == "#":
            continue
        numcols = len(line.split())
        break
    inf.close()
    if (do3D and numcols < 3) or (not do3D and numcols < 2):
        grass.fatal(_("Not enough data columns. (incorrect fs setting?)"))

    grass.run_command(
        "v.in.mapgen", flags="f" + do3D, input=runfile, output=options["output"]
    )
Example no. 11
def main():
    # if no output filename, output to stdout
    input = options['input']
    player = int(options['player'])
    output = options['output']
    idcolumn = options['idcolumn'] if options['idcolumn'] else False
    sep = separator(options['separator'])
    bidirectional = flags['b']
    global tempmapname
    tempmapname = 'neighborhoodmatrix_tempmap_%d' % os.getpid()
    # TODO: automatically determine the first available layer in file
    blayer = player + 1

    gscript.run_command('v.category',
                        input=input,
                        output=tempmapname,
                        option='add',
                        layer=blayer,
                        type='boundary',
                        quiet=True,
                        overwrite=True)
    vtodb_results = gscript.read_command('v.to.db',
                                         flags='p',
                                         map=tempmapname,
                                         type='boundary',
                                         option='sides',
                                         layer=blayer,
                                         qlayer=player,
                                         quiet=True)

    # put result into a list of integer pairs
    temp_neighbors = []
    for line in vtodb_results.splitlines():
        if line.split('|')[1] != '-1' and line.split('|')[2] != '-1':
            temp_neighbors.append(
                [int(line.split('|')[1]),
                 int(line.split('|')[2])])

    # temp_neighbors.sort()

    # if user wants a bidirectional matrix, add the reversed pairs to the original
    if bidirectional:
        neighbors_reversed = []
        for pair in temp_neighbors:
            neighbors_reversed.append([pair[1], pair[0]])
        temp_neighbors += neighbors_reversed

    # uniqify the list of integer pairs
    neighbors = sorted(
        [list(x) for x in set(tuple(x) for x in temp_neighbors)])

    currentcat = ''
    if output and output != '-':
        out = open(output, 'w')
    for pair in neighbors:
        if idcolumn:
            # While pair[0] stays the same we don't have to call v.db.select
            # again and again to get the id
            if currentcat != pair[0]:
                currentcat = pair[0]
                fromid = gscript.read_command('v.db.select',
                                              map=input,
                                              column=idcolumn,
                                              where="cat=%d" % pair[0],
                                              layer=player,
                                              flags="c",
                                              quiet=True).rstrip()
            toid = gscript.read_command('v.db.select',
                                        map=input,
                                        column=idcolumn,
                                        where="cat=%d" % pair[1],
                                        layer=player,
                                        flags="c",
                                        quiet=True).rstrip()
            if output and output != '-':
                out.write(fromid + sep + toid + '\n')
            else:
                print((fromid + sep + toid))
        else:
            if output and output != '-':
                out.write(str(pair[0]) + sep + str(pair[1]) + '\n')
            else:
                print((str(pair[0]) + sep + str(pair[1])))
    if output and output != '-':
        out.close()

    sys.exit()
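
Note: tempmapname is declared global so the temporary boundary-category map can be dropped when the script exits. A minimal cleanup sketch, assuming the handler is registered with atexit (g.remove with flags='f' is the standard non-interactive removal):

import atexit
import grass.script as gscript

tempmapname = None

def cleanup():
    # drop the temporary vector map created by main(), if any
    if tempmapname:
        gscript.run_command('g.remove', type='vector', name=tempmapname,
                            flags='f', quiet=True)

atexit.register(cleanup)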
Example no. 12
def main():
    # if no output filename, output to stdout
    input = options["input"]
    player = int(options["player"])
    output = options["output"]
    idcolumn = options["idcolumn"] if options["idcolumn"] else False
    sep = separator(options["separator"])
    bidirectional = flags["b"]
    global tempmapname
    tempmapname = "neighborhoodmatrix_tempmap_%d" % os.getpid()
    # TODO: automatically determine the first available layer in file
    blayer = player + 1

    gscript.run_command(
        "v.category",
        input=input,
        output=tempmapname,
        option="add",
        layer=blayer,
        type="boundary",
        quiet=True,
        overwrite=True,
    )
    vtodb_results = gscript.read_command(
        "v.to.db",
        flags="p",
        map=tempmapname,
        type="boundary",
        option="sides",
        layer=blayer,
        qlayer=player,
        quiet=True,
    )

    # put result into a list of integer pairs
    temp_neighbors = []
    for line in vtodb_results.splitlines():
        if line.split("|")[1] != "-1" and line.split("|")[2] != "-1":
            temp_neighbors.append(
                [int(line.split("|")[1]),
                 int(line.split("|")[2])])

    # temp_neighbors.sort()

    # if user wants a bidirectional matrix, add the reversed pairs to the original
    if bidirectional:
        neighbors_reversed = []
        for pair in temp_neighbors:
            neighbors_reversed.append([pair[1], pair[0]])
        temp_neighbors += neighbors_reversed

    # uniqify the list of integer pairs
    neighbors = sorted(
        [list(x) for x in set(tuple(x) for x in temp_neighbors)])

    currentcat = ""
    if output and output != "-":
        out = open(output, "w")
    for pair in neighbors:
        if idcolumn:
            # While pair[0] stays the same we don't have to call v.db.select
            # again and again to get the id
            if currentcat != pair[0]:
                currentcat = pair[0]
                fromid = gscript.read_command(
                    "v.db.select",
                    map=input,
                    column=idcolumn,
                    where="cat=%d" % pair[0],
                    layer=player,
                    flags="c",
                    quiet=True,
                ).rstrip()
            toid = gscript.read_command(
                "v.db.select",
                map=input,
                column=idcolumn,
                where="cat=%d" % pair[1],
                layer=player,
                flags="c",
                quiet=True,
            ).rstrip()
            if output and output != "-":
                out.write(fromid + sep + toid + "\n")
            else:
                print((fromid + sep + toid))
        else:
            if output and output != "-":
                out.write(str(pair[0]) + sep + str(pair[1]) + "\n")
            else:
                print((str(pair[0]) + sep + str(pair[1])))
    if output and output != "-":
        out.close()

    sys.exit()
Example no. 13
def main():
    check_addon_installed('r.object.geometry', fatal=True)

    dev_start = options['development_start']
    dev_end = options['development_end']
    only_file = flags['l']
    patches_per_subregion = flags['s']
    if not only_file:
        repeat = int(options['repeat'])
        compactness_means = [float(each) for each in options['compactness_mean'].split(',')]
        compactness_ranges = [float(each) for each in options['compactness_range'].split(',')]
        discount_factors = [float(each) for each in options['discount_factor'].split(',')]
    patches_file = options['patch_sizes']
    threshold = float(options['patch_threshold'])
    sep = gutils.separator(options['separator'])
    # v.clean removes size <= threshold, we want to keep size == threshold
    threshold -= 1e-6

    # compute cell size
    region = gcore.region()
    res = (region['nsres'] + region['ewres']) / 2.
    coeff = float(gcore.parse_command('g.proj', flags='g')['meters'])
    cell_size = res * res * coeff * coeff

    tmp_name = 'tmp_futures_calib_' + str(os.getpid()) + '_'
    global TMP

    orig_patch_diff = tmp_name + 'orig_patch_diff'
    TMP.append(orig_patch_diff)
    tmp_clump = tmp_name + 'tmp_clump'
    TMP.append(tmp_clump)
    if patches_per_subregion:
        tmp_cat_clump = tmp_name + 'tmp_cat_clump'
        TMP.append(tmp_cat_clump)

    gcore.message(_("Analyzing original patches..."))
    diff_development(dev_start, dev_end, options['subregions'], orig_patch_diff)
    data = write_data = patch_analysis(orig_patch_diff, threshold, tmp_clump)
    if patches_per_subregion:
        subregions_data = patch_analysis_per_subregion(orig_patch_diff, options['subregions'],
                                                       threshold, tmp_clump, tmp_cat_clump)
        # if there is just one column, write the previous analysis result
        if len(subregions_data.keys()) > 1:
            write_data = subregions_data
    write_patches_file(write_data, cell_size, patches_file, sep)

    if only_file:
        return

    area, perimeter = data.T
    compact = compactness(area, perimeter)

    # area histogram
    area = area / cell_size
    bin_width = 1.  # automatic ways to determine bin width do not perform well in this case
    hist_bins_area_orig = int(np.ptp(area) / bin_width)
    hist_range_area_orig = (np.min(area), np.max(area))
    histogram_area_orig, _edges = np.histogram(area, bins=hist_bins_area_orig,
                                               range=hist_range_area_orig, density=True)
    histogram_area_orig = histogram_area_orig * 100  # to get percentage for readability

    # compactness histogram
    bin_width = 0.1
    hist_bins_compactness_orig = int(np.ptp(compact) / bin_width)
    hist_range_compactness_orig = (np.min(compact), np.max(compact))
    histogram_compactness_orig, _edges = np.histogram(compact, bins=hist_bins_compactness_orig,
                                                      range=hist_range_compactness_orig, density=True)
    histogram_compactness_orig = histogram_compactness_orig * 100  # to get percentage for readability

    seed = int(options['random_seed'])
    nprocs = int(options['nprocs'])
    count = 0
    proc_count = 0
    queue_list = []
    proc_list = []
    num_all = len(compactness_means) * len(compactness_ranges) * len(discount_factors)
    with open(options['calibration_results'], 'w') as f:
        for com_mean in compactness_means:
            for com_range in compactness_ranges:
                for discount_factor in discount_factors:
                    count += 1
                    q = Queue()
                    p = Process(target=run_one_combination,
                                args=(count, num_all, repeat, seed, dev_start, com_mean, com_range,
                                      discount_factor, patches_file, options, threshold,
                                      hist_bins_area_orig, hist_range_area_orig, hist_bins_compactness_orig,
                                      hist_range_compactness_orig, cell_size, histogram_area_orig, histogram_compactness_orig,
                                      tmp_name, q))
                    p.start()
                    queue_list.append(q)
                    proc_list.append(p)
                    proc_count += 1
                    seed += 1
                    if proc_count == nprocs or count == num_all:
                        for i in range(proc_count):
                            proc_list[i].join()
                            data = queue_list[i].get()
                            if not data:
                                continue
                            f.write(','.join([str(data['input_discount_factor']), str(data['area_distance']),
                                              str(data['input_compactness_mean']), str(data['input_compactness_range']),
                                              str(data['compactness_distance'])]))
                            f.write('\n')
                        f.flush()
                        proc_count = 0
                        proc_list = []
                        queue_list = []
    # compute combined normalized error
    process_calibration(options['calibration_results'])
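
Note: compactness(area, perimeter) is defined elsewhere in the module. A common definition, and the assumption made in this sketch, normalizes the patch perimeter by the perimeter of a circle with the same area:

import numpy as np

def compactness(area, perimeter):
    # 1.0 for a perfect circle; larger values mean less compact patches
    return perimeter / (2 * np.sqrt(np.pi * area))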
Example no. 14
def main():
    coords = options['coordinates']
    input = options['input']
    output = options['output']
    fs = options['separator']
    proj_in = options['proj_in']
    proj_out = options['proj_out']
    ll_in = flags['i']
    ll_out = flags['o']
    decimal = flags['d']
    copy_input = flags['e']
    include_header = flags['c']

    #### check for cs2cs
    if not grass.find_program('cs2cs'):
        grass.fatal(_("cs2cs program not found, install PROJ.4 first: http://proj.maptools.org"))

    #### check for overenthusiasm
    if proj_in and ll_in:
        grass.fatal(_("Choose only one input parameter method"))

    if proj_out and ll_out:
        grass.fatal(_("Choose only one output parameter method"))

    if ll_in and ll_out:
        grass.fatal(_("Choose only one auto-projection parameter method"))

    if output and not grass.overwrite() and os.path.exists(output):
        grass.fatal(_("Output file already exists"))

    if not coords and not input:
        grass.fatal(_("One of <coordinates> and <input> must be given"))
    if coords and input:
        grass.fatal(_("Options <coordinates> and <input> are mutually exclusive"))

    #### parse field separator
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ',':
        ifs = ofs = ','
    else:
        try:
            ifs, ofs = fs.split(',')
        except ValueError:
            ifs = ofs = fs

    ifs = separator(ifs)
    ofs = separator(ofs)

    #### set up projection params
    s = grass.read_command("g.proj", flags='j')
    kv = parse_key_val(s)
    if "XY location" in kv['+proj'] and (ll_in or ll_out):
        grass.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        grass.verbose("Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = grass.read_command('g.proj', flags='jf')

    if proj_in:
        in_proj = proj_in

    if not in_proj:
        grass.verbose("Assuming current location as input")
        in_proj = grass.read_command('g.proj', flags='jf')

    in_proj = in_proj.strip()
    grass.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        grass.verbose("Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
        out_proj = grass.read_command('g.proj', flags='jf')

    if proj_out:
        out_proj = proj_out

    if not out_proj:
        grass.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    grass.verbose("Output parameters: '%s'" % out_proj)

    #### set up input file
    if coords:
        x, y = coords.split(',')
        tmpfile = grass.tempfile()
        fd = open(tmpfile, "w")
        fd.write("%s%s%s\n" % (x, ifs, y))
        fd.close()
        inf = open(tmpfile)
    else:
        if input == '-':
            infile = None
            inf = sys.stdin
        else:
            infile = input
            if not os.path.exists(infile):
                grass.fatal(_("Unable to read input data"))
            inf = open(infile)
            grass.debug("input file=[%s]" % infile)

    #### set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, 'w')
        grass.debug("output file=[%s]" % outfile)

    #### set up output style
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    #### do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'

    cmd = ['cs2cs'] + copyinp + outfmt + in_proj.split() + ['+to'] + out_proj.split()
    p = grass.Popen(cmd, stdin=grass.PIPE, stdout=grass.PIPE)

    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            try:
                xy, z = line.split(' ', 1)
                x, y = xy.split('\t')
            except ValueError:
                grass.fatal(line)

            outf.write('%s%s%s%s%s\n' %
                       (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = line.split('\t')
            inX, inY = inXYZ.split(' ')[:2]
            y, z = rest.split(' ', 1)
            outf.write('%s%s%s%s%s%s%s%s%s\n' %
                       (inX.strip(), ofs, inY.strip(), ofs, x.strip(),
                        ofs, y.strip(), ofs, z.strip()))

    p.wait()

    if p.returncode != 0:
        grass.warning(_("Projection transform probably failed, please investigate"))
Example no. 15
def main():
    coords = options['coordinates']
    input = options['input']
    output = options['output']
    fs = options['separator']
    proj_in = options['proj_in']
    proj_out = options['proj_out']
    ll_in = flags['i']
    ll_out = flags['o']
    decimal = flags['d']
    copy_input = flags['e']
    include_header = flags['c']

    # check for cs2cs
    if not gcore.find_program('cs2cs'):
        gcore.fatal(
            _("cs2cs program not found, install PROJ.4 first: \
            http://proj.maptools.org"))

    # check for overenthusiasm
    if proj_in and ll_in:
        gcore.fatal(_("Choose only one input parameter method"))

    if proj_out and ll_out:
        gcore.fatal(_("Choose only one output parameter method"))

    if ll_in and ll_out:
        gcore.fatal(_("Choose only one auto-projection parameter method"))

    if output and not gcore.overwrite() and os.path.exists(output):
        gcore.fatal(_("Output file already exists"))

    if not coords and not input:
        gcore.fatal(_("One of <coordinates> and <input> must be given"))
    if coords and input:
        gcore.fatal(
            _("Options <coordinates> and <input> are mutually exclusive"))

    # parse field separator
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ',':
        ifs = ofs = ','
    else:
        try:
            ifs, ofs = fs.split(',')
        except ValueError:
            ifs = ofs = fs

    ifs = separator(ifs)
    ofs = separator(ofs)

    # set up projection params
    s = gcore.read_command("g.proj", flags='j')
    kv = parse_key_val(s)
    if "XY location" in kv['+proj'] and (ll_in or ll_out):
        gcore.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = gcore.read_command('g.proj', flags='jf')

    if proj_in:
        if '+' in proj_in:
            in_proj = proj_in
        else:
            gcore.fatal(_("Invalid PROJ.4 input specification"))

    if not in_proj:
        gcore.verbose("Assuming current location as input")
        in_proj = gcore.read_command('g.proj', flags='jf')

    in_proj = in_proj.strip()
    gcore.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming current projection as input, LL WGS84 as output")

    if ll_in:
        out_proj = gcore.read_command('g.proj', flags='jf')

    if proj_out:
        if '+' in proj_out:
            out_proj = proj_out
        else:
            gcore.fatal(_("Invalid PROJ.4 output specification"))

    if not out_proj:
        gcore.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    gcore.verbose("Output parameters: '%s'" % out_proj)

    # set up input file
    if coords:
        x, y = coords.split(',')
        tmpfile = gcore.tempfile()
        fd = open(tmpfile, "w")
        fd.write("%s%s%s\n" % (x, ifs, y))
        fd.close()
        inf = open(tmpfile)
    else:
        if input == '-':
            infile = None
            inf = sys.stdin
        else:
            infile = input
            if not os.path.exists(infile):
                gcore.fatal(_("Unable to read input data"))
            inf = open(infile)
            gcore.debug("input file=[%s]" % infile)

    # set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, 'w')
        gcore.debug("output file=[%s]" % outfile)

    # set up output style
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    # do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'
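    # A rough Python equivalent of the sed pipeline above (illustrative only,
    # not used by this module):
    #   def dms_to_grass(s):
    #       return s.replace('d', ':').replace("'", ':').replace('"', '')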

    cmd = ['cs2cs'] + copyinp + outfmt + \
        in_proj.split() + ['+to'] + out_proj.split()

    p = gcore.Popen(cmd, stdin=gcore.PIPE, stdout=gcore.PIPE)

    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            try:
                xy, z = line.split(' ', 1)
                x, y = xy.split('\t')
            except ValueError:
                gcore.fatal(line)

            outf.write('%s%s%s%s%s\n' %
                       (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = line.split('\t')
            inX, inY = inXYZ.split(' ')[:2]
            y, z = rest.split(' ', 1)
            outf.write('%s%s%s%s%s%s%s%s%s\n' %
                       (inX.strip(), ofs, inY.strip(), ofs, x.strip(), ofs,
                        y.strip(), ofs, z.strip()))

    p.wait()

    if p.returncode != 0:
        gcore.warning(
            _("Projection transform probably failed, please investigate"))
Esempio n. 16
0
def main():
    global tmp

    fs = separator(options['separator'])
    threeD = flags['z']

    prog = 'v.in.lines'

    if threeD:
        do3D = 'z'
    else:
        do3D = ''

    tmp = grass.tempfile()

    # set up input file
    if options['input'] == '-':
        infile = None
        inf = sys.stdin
    else:
        infile = options['input']
        if not os.path.exists(infile):
            grass.fatal(_("Unable to read input file <%s>") % infile)
        grass.debug("input file=[%s]" % infile)

    if not infile:
        # read from stdin and write to tmpfile (v.in.mapgen wants a real file)
        outf = open(tmp, 'w')
        for line in inf:
            if len(line.lstrip()) == 0 or line[0] == '#':
                continue
            outf.write(line.replace(fs, ' '))

        outf.close()
        runfile = tmp
    else:
        # read from a real file
        if fs == ' ':
            runfile = infile
        else:
            inf = open(infile)
            outf = open(tmp, 'w')

            for line in inf:
                if len(line.lstrip()) == 0 or line[0] == '#':
                    continue
                outf.write(line.replace(fs, ' '))

            inf.close()
            outf.close()
            runfile = tmp

    # check that there are at least two columns (three if -z is given)
    inf = open(runfile)
    numcols = 0
    for line in inf:
        if len(line.lstrip()) == 0 or line[0] == '#':
            continue
        numcols = len(line.split())
        break
    inf.close()
    if (do3D and numcols < 3) or (not do3D and numcols < 2):
        grass.fatal(_("Not enough data columns. (incorrect fs setting?)"))

    grass.run_command('v.in.mapgen',
                      flags='f' + do3D,
                      input=runfile,
                      output=options['output'])
Esempio n. 17
0
def main():
    vinput = options["input"]
    columns = options["columns"].split(",")
    binary = options["developed_column"]
    level = options["subregions_column"]
    random = options["random_column"]
    sep = gutils.separator(options["separator"])
    minim = int(options["min_variables"])
    dredge = flags["d"]
    nprocs = int(options["nprocs"])
    fixed_columns = (options["fixed_columns"].split(",")
                     if options["fixed_columns"] else [])

    for each in fixed_columns:
        if each not in columns:
            gscript.fatal(
                _("Fixed predictor {} not among predictors specified in option 'columns'"
                  ).format(each))
    if options["max_variables"]:
        maxv = int(options["max_variables"])
    else:
        maxv = len(columns)
    if dredge and minim > maxv:
        gscript.fatal(
            _("Minimum number of predictor variables is larger than maximum number"
              ))

    if not gscript.find_program("Rscript", "--version"):
        gscript.fatal(
            _("Rscript required for running r.futures.potential, but not found. "
              "Make sure you have R installed and added to the PATH."))

    global TMP_CSV, TMP_RSCRIPT, TMP_POT, TMP_DREDGE
    TMP_CSV = gscript.tempfile(create=False) + ".csv"
    TMP_RSCRIPT = gscript.tempfile()
    include_level = True
    distinct = gscript.read_command(
        "v.db.select",
        flags="c",
        map=vinput,
        columns="distinct {level}".format(level=level),
    ).strip()
    if len(distinct.splitlines()) <= 1:
        include_level = False
        single_level = distinct.splitlines()[0]
    with open(TMP_RSCRIPT, "w") as f:
        f.write(rscript)
    TMP_POT = gscript.tempfile(create=False) + "_potential.csv"
    TMP_DREDGE = gscript.tempfile(create=False) + "_dredge.csv"
    columns += [binary]
    if include_level:
        columns += [level]
    if random:
        columns += [random]
    # filter duplicates
    columns = list(dict.fromkeys(columns))
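    # dict.fromkeys() keeps first occurrences in order (Python 3.7+), e.g.
    # list(dict.fromkeys(["a", "b", "a"])) == ["a", "b"]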
    where = "{c} IS NOT NULL".format(c=columns[0])
    for c in columns[1:]:
        where += " AND {c} IS NOT NULL".format(c=c)
    gscript.run_command(
        "v.db.select",
        map=vinput,
        columns=columns,
        separator="comma",
        where=where,
        file=TMP_CSV,
    )

    if dredge:
        gscript.info(_("Running automatic model selection ..."))
    else:
        gscript.info(_("Computing model..."))

    cmd = [
        "Rscript",
        TMP_RSCRIPT,
        "-i",
        TMP_CSV,
        "-r",
        binary,
        "-o",
        TMP_POT,
        "-p",
        ",".join(columns),
        "-m",
        str(minim),
        "-x",
        str(maxv),
        "-d",
        "TRUE" if dredge else "FALSE",
        "-n",
        str(nprocs),
    ]
    if include_level:
        cmd += ["-l", level]
        if random:
            cmd += ["-a", random]
    if dredge and fixed_columns:
        cmd += ["-f", ",".join(fixed_columns)]
    if dredge and options["dredge_output"]:
        cmd += ["-e", TMP_DREDGE]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if stderr:
        gscript.warning(gscript.decode(stderr))
    if p.returncode != 0:
        gscript.fatal(_("Running R script failed, check messages above"))

    gscript.info(_("Best model summary:"))
    gscript.info("-------------------------")
    gscript.message(gscript.decode(stdout))

    # note: this would be better with pandas, but adds dependency
    with open(TMP_POT, "r") as fin, open(options["output"], "w") as fout:
        i = 0
        for line in fin.readlines():
            row = line.strip().split("\t")
            row = [each.strip('"') for each in row]
            if i == 0:
                row[0] = "ID"
                if include_level and random:
                    row[2] = row[1]
                row[1] = "Intercept"
            if i == 1 and not include_level:
                row[0] = single_level
            if i >= 1:
                if include_level:
                    # devpressure needs to be after intercept
                    if random:
                        row[2], row[1] = row[1], row[2]
                else:
                    row[0] = single_level
            fout.write(sep.join(row))
            fout.write("\n")
            i += 1
    if options["dredge_output"]:
        with open(TMP_DREDGE, "r") as fin, open(options["dredge_output"],
                                                "w") as fout:
            i = 0
            for line in fin.readlines():
                row = line.strip().split(",")
                row = [each.strip('"') for each in row]
                if i == 0:
                    row[0] = "ID"
                    row[1] = "Intercept"
                fout.write(sep.join(row))
                fout.write("\n")
                i += 1
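
# A pandas-based alternative to the manual rewriting above would be shorter
# (illustrative sketch only; pandas is deliberately avoided as a dependency
# here, and this skips the ID/Intercept header fixes done in the loops):
#   import pandas as pd
#   df = pd.read_csv(TMP_POT, sep="\t")
#   df.to_csv(options["output"], sep=sep, index=False)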
Esempio n. 18
0
def main():
    developments = options['development'].split(',')
    observed_popul_file = options['observed_population']
    projected_popul_file = options['projected_population']
    sep = gutils.separator(options['separator'])
    subregions = options['subregions']
    methods = options['method'].split(',')
    plot = options['plot']
    simulation_times = [
        float(each) for each in options['simulation_times'].split(',')
    ]

    for each in methods:
        if each in ('exp_approach', 'logarithmic2'):
            try:
                from scipy.optimize import curve_fit
            except ImportError:
                gcore.fatal(
                    _("Importing scipy failed. Method '{m}' is not available").
                    format(m=each))

    # exp approach needs at least 3 data points
    if len(developments) <= 2 and ('exp_approach' in methods
                                   or 'logarithmic2' in methods):
        gcore.fatal(_("Not enough data for method 'exp_approach'"))
    if len(developments) == 3 and ('exp_approach' in methods
                                   and 'logarithmic2' in methods):
        gcore.warning(
            _("Can't decide between 'exp_approach' and 'logarithmic2' methods"
              " because both can fit 3 data points exactly,"
              " resulting in RMSE = 0"))
    observed_popul = np.genfromtxt(observed_popul_file,
                                   dtype=float,
                                   delimiter=sep,
                                   names=True)
    projected_popul = np.genfromtxt(projected_popul_file,
                                    dtype=float,
                                    delimiter=sep,
                                    names=True)
    year_col = observed_popul.dtype.names[0]
    observed_times = observed_popul[year_col]
    year_col = projected_popul.dtype.names[0]
    projected_times = projected_popul[year_col]

    if len(developments) != len(observed_times):
        gcore.fatal(
            _("Number of development raster maps does not correspond"
              " to the number of observed times"))

    # gather developed cells in subregions
    gcore.info(_("Computing number of developed cells..."))
    table_developed = {}
    subregionIds = set()
    for i in range(len(observed_times)):
        gcore.percent(i, len(observed_times), 1)
        data = gcore.read_command('r.univar',
                                  flags='gt',
                                  zones=subregions,
                                  map=developments[i])
        for line in data.splitlines():
            stats = line.split('|')
            if stats[0] == 'zone':
                continue
            subregionId, developed_cells = stats[0], int(stats[12])
            subregionIds.add(subregionId)
            if i == 0:
                table_developed[subregionId] = []
            table_developed[subregionId].append(developed_cells)
    gcore.percent(1, 1, 1)
    subregionIds = sorted(list(subregionIds))
    # linear interpolation between population points
    population_for_simulated_times = {}
    for subregionId in table_developed.keys():
        population_for_simulated_times[subregionId] = np.interp(
            x=simulation_times,
            xp=np.append(observed_times, projected_times),
            fp=np.append(observed_popul[subregionId],
                         projected_popul[subregionId]))
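        # np.interp() interpolates linearly between the known points, e.g.
        # np.interp(2001.5, [2001, 2002], [100.0, 110.0]) == 105.0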
    # regression
    demand = {}
    i = 0
    if plot:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        n_plots = np.ceil(np.sqrt(len(subregionIds)))
        fig = plt.figure(figsize=(5 * n_plots, 5 * n_plots))

    for subregionId in subregionIds:
        i += 1
        rmse = dict()
        predicted = dict()
        simulated = dict()
        coeff = dict()
        for method in methods:
            # observed population points for subregion
            reg_pop = observed_popul[subregionId]
            simulated[method] = np.array(
                population_for_simulated_times[subregionId])

            if method in ('exp_approach', 'logarithmic2'):
                # we have to scale it first
                y = np.array(table_developed[subregionId])
                magn = float(
                    np.power(
                        10,
                        max(magnitude(np.max(reg_pop)), magnitude(np.max(y)))))
                x = reg_pop / magn
                y = y / magn
                if method == 'exp_approach':
                    initial = (
                        0.5, np.mean(x), np.mean(y)
                    )  # this seems to work best for our data for exp_approach
                elif method == 'logarithmic2':
                    popt, pcov = curve_fit(logarithmic, x, y)
                    initial = (popt[0], popt[1], 0)
                # with 'raise' it would stop on every FloatingPointError
                with np.errstate(invalid='warn'):
                    try:
                        popt, pcov = curve_fit(globals()[method],
                                               x,
                                               y,
                                               p0=initial)
                        if np.isnan(popt).any():
                            raise RuntimeError
                        # would result in nans in predicted
                        if method == 'logarithmic2' and np.any(
                                simulated[method] / magn <= popt[-1]):
                            raise RuntimeError
                    except (FloatingPointError, RuntimeError):
                        rmse[method] = sys.maxsize  # so another method is selected
                        gcore.warning(
                            _("Method '{m}' cannot converge for subregion {reg}"
                              .format(m=method, reg=subregionId)))
                        if len(methods) == 1:
                            gcore.fatal(
                                _("Method '{m}' failed for subregion {reg},"
                                  " please select at least one other method").
                                format(m=method, reg=subregionId))
                    else:
                        predicted[method] = globals()[method](
                            simulated[method] / magn, *popt) * magn
                        r = globals()[method](
                            x, *popt) * magn - table_developed[subregionId]
                        coeff[method] = popt
                        if len(reg_pop) > 3:
                            rmse[method] = np.sqrt(
                                (np.sum(r * r) / (len(reg_pop) - 3)))
                        else:
                            rmse[method] = 0
            else:
                if method == 'logarithmic':
                    reg_pop = np.log(reg_pop)
                if method == 'exponential':
                    y = np.log(table_developed[subregionId])
                else:
                    y = table_developed[subregionId]
                A = np.vstack((reg_pop, np.ones(len(reg_pop)))).T
                npversion = [int(x) for x in np.__version__.split('.')]
                if npversion >= [1, 14, 0]:
                    rcond = None
                else:
                    rcond = -1
                m, c = np.linalg.lstsq(A, y, rcond=rcond)[0]  # y = mx + c
                coeff[method] = m, c
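                # lstsq minimizes ||A @ [m, c] - y||^2; e.g. for
                # reg_pop = [1, 2, 3], y = [2, 4, 6] it returns m = 2, c = 0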

                if method == 'logarithmic':
                    with np.errstate(invalid='ignore', divide='ignore'):
                        predicted[method] = np.where(
                            simulated[method] > 1,
                            np.log(simulated[method]) * m + c, 0)
                    predicted[method] = np.where(predicted[method] > 0,
                                                 predicted[method], 0)
                    r = (reg_pop * m + c) - table_developed[subregionId]
                elif method == 'exponential':
                    predicted[method] = np.exp(m * simulated[method] + c)
                    r = np.exp(m * reg_pop + c) - table_developed[subregionId]
                else:  # linear
                    predicted[method] = simulated[method] * m + c
                    r = (reg_pop * m + c) - table_developed[subregionId]
                # RMSE
                if len(reg_pop) > 2:
                    rmse[method] = np.sqrt(
                        (np.sum(r * r) / (len(reg_pop) - 2)))
                else:
                    rmse[method] = 0
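                # i.e. RMSE = sqrt(sum(r_i^2) / (n - k)) with k = 2 fitted
                # parameters here (k = 3 for the curve-fitted methods above)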

        method = min(rmse, key=rmse.get)
        gcore.verbose(
            _("Method '{meth}' was selected for subregion {reg}").format(
                meth=method, reg=subregionId))
        # write demand
        demand[subregionId] = predicted[method]
        demand[subregionId] = np.diff(demand[subregionId])
        if np.any(demand[subregionId] < 0):
            gcore.warning(
                _("Subregion {sub} has negative numbers"
                  " of newly developed cells, changing to zero".format(
                      sub=subregionId)))
            demand[subregionId][demand[subregionId] < 0] = 0
        if coeff[method][0] < 0:
            # couldn't establish a reliable population-area relation;
            # project by the number of developed pixels in the analyzed period
            range_developed = table_developed[subregionId][
                -1] - table_developed[subregionId][0]
            range_times = observed_times[-1] - observed_times[0]
            dev_per_step = math.ceil(range_developed / float(range_times))
            # this assumes demand is projected yearly
            demand[subregionId].fill(dev_per_step if dev_per_step > 0 else 0)
            gcore.warning(
                _("For subregion {sub} population and development are inversely proportional,"
                  " demand will be interpolated based on prior change in development only."
                  .format(sub=subregionId)))

        # draw
        if plot:
            ax = fig.add_subplot(n_plots, n_plots, i)
            ax.set_title("{sid}, RMSE: {rmse:.3f}".format(sid=subregionId,
                                                          rmse=rmse[method]))
            ax.set_xlabel('population')
            ax.set_ylabel('developed cells')
            # plot known points
            x = np.array(observed_popul[subregionId])
            y = np.array(table_developed[subregionId])
            ax.plot(x, y, marker='o', linestyle='', markersize=8)
            # plot predicted curve
            x_pred = np.linspace(
                np.min(x),
                np.max(np.array(population_for_simulated_times[subregionId])),
                30)
            cf = coeff[method]
            if method == 'linear':
                line = x_pred * cf[0] + cf[1]
                label = "$y = {c:.3f} + {m:.3f} x$".format(m=cf[0], c=cf[1])
            elif method == 'logarithmic':
                line = np.log(x_pred) * cf[0] + cf[1]
                label = "$y = {c:.3f} + {m:.3f} \ln(x)$".format(m=cf[0],
                                                                c=cf[1])
            elif method == 'exponential':
                line = np.exp(x_pred * cf[0] + cf[1])
                label = "$y = {c:.3f} e^{{{m:.3f}x}}$".format(m=cf[0],
                                                              c=np.exp(cf[1]))
            elif method == 'exp_approach':
                line = exp_approach(x_pred / magn, *cf) * magn
                label = "$y = (1 -  e^{{-{A:.3f}(x-{B:.3f})}}) + {C:.3f}$".format(
                    A=cf[0], B=cf[1], C=cf[2])
            elif method == 'logarithmic2':
                line = logarithmic2(x_pred / magn, *cf) * magn
                label = "$y = {A:.3f} + {B:.3f} \ln(x-{C:.3f})$".format(
                    A=cf[0], B=cf[1], C=cf[2])

            ax.plot(x_pred, line, label=label)
            ax.plot(simulated[method],
                    predicted[method],
                    linestyle='',
                    marker='o',
                    markerfacecolor='None')
            plt.legend(loc=0)
            labels = ax.get_xticklabels()
            plt.setp(labels, rotation=30)
    if plot:
        plt.tight_layout()
        fig.savefig(plot)

    # write demand
    with open(options['demand'], 'w') as f:
        header = observed_popul.dtype.names  # the order is kept here
        header = [header[0]
                  ] + [sub for sub in header[1:] if sub in subregionIds]
        f.write(sep.join(header))
        f.write('\n')
        i = 0
        for time in simulation_times[1:]:
            f.write(str(int(time)))
            f.write(sep)
            # header[1:] was filtered above, so subregions outside the region
            # are skipped while the original column order is kept
            for sub in header[1:]:
                f.write(str(int(demand[sub][i])))
                if sub != header[-1]:
                    f.write(sep)
            f.write('\n')
            i += 1
Esempio n. 19
0
def main():
    strds = options["strds"]
    out_name = options["output"]
    if options["weight"] == '':
        method = None
    else:
        method = options["weight"]
    where = options["where"]
    sep = separator(options["separator"])
    if flags['p'] and not options["splittingday"]:
        gscript.fatal(_("'p' flag required to set also 'splittingday' option"))
    elif flags['p'] and options["splittingday"] and out_name == '-':
        gscript.fatal(_("'output' option is required with 'p' flag"))

    if flags['k'] and flags['p']:
        gscript.fatal(_("It is not possible to use the 'k' and 'p' flags together"))
    elif flags['k'] and not method:
        rkappa = True
    elif flags['k'] and method:
        gscript.message(_("If method is different from 'no' it is not possible"
                          " to use r.kappa"))
        rkappa = _load_skll()
    else:
        rkappa = _load_skll()
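    # rkappa == True routes the computation through GRASS r.kappa below;
    # otherwise kappa comes from the skll-based helper (_kappa_skll), which
    # is the only path that supports a weighting method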

    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where, "start_time", None)
    if maps is None:
        gscript.fatal(_("Space time raster dataset {st} seems to be "
                        "empty".format(st=strds)))
        return 1

    if flags['p']:
        before, after = _split_maps(maps, options["splittingday"])
        _kappa_pixel(before, after, out_name, method, gscript.overwrite())
        return

    mapnames = [mapp.get_name() for mapp in maps]
    if not rkappa:
        if out_name != '-':
            fi = open(out_name, 'w')
        else:
            fi = sys.stdout
    for i1 in range(len(mapnames)):
        for i2 in range(i1 + 1, len(mapnames)):
            map1 = mapnames[i1]
            map2 = mapnames[i2]
            if map1 != map2:
                if not rkappa:
                    fi.write("{}-{}{}{}\n".format(map1, map2, sep,
                                                  _kappa_skll(map1, map2,
                                                              flags['l'],
                                                              method)))
                else:
                    if out_name != '-':
                        fi = open("{}_{}_{}".format(out_name, map1, map2), 'w')
                    else:
                        fi = sys.stdout
                    fi.write("{}".format(_kappa_grass(map1, map2)))
                    if out_name != '-':
                        fi.close()
    if not rkappa:
        fi.close()

    gscript.message(_("All data have analyzed"))
Esempio n. 20
0
def main():
    global tmp

    fs = separator(options["separator"])
    threeD = flags["z"]

    prog = "v.in.lines"

    if threeD:
        do3D = "z"
    else:
        do3D = ""

    tmp = grass.tempfile()

    # set up input file
    if options["input"] == "-":
        infile = None
        inf = sys.stdin
    else:
        infile = options["input"]
        if not os.path.exists(infile):
            grass.fatal(_("Unable to read input file <%s>") % infile)
        grass.debug("input file=[%s]" % infile)

    if not infile:
        # read from stdin and write to tmpfile (v.in.mapgen wants a real file)
        outf = file(tmp, "w")
        for line in inf:
            if len(line.lstrip()) == 0 or line[0] == "#":
                continue
            outf.write(line.replace(fs, " "))

        outf.close()
        runfile = tmp
    else:
        # read from a real file
        if fs == " ":
            runfile = infile
        else:
            inf = open(infile)
            outf = open(tmp, "w")

            for line in inf:
                if len(line.lstrip()) == 0 or line[0] == "#":
                    continue
                outf.write(line.replace(fs, " "))

            inf.close()
            outf.close()
            runfile = tmp

    # check that there are at least two columns (three if -z is given)
    inf = open(runfile)
    numcols = 0
    for line in inf:
        if len(line.lstrip()) == 0 or line[0] == "#":
            continue
        numcols = len(line.split())
        break
    inf.close()
    if (do3D and numcols < 3) or (not do3D and numcols < 2):
        grass.fatal(_("Not enough data columns. (incorrect fs setting?)"))

    grass.run_command("v.in.mapgen", flags="f" + do3D, input=runfile, output=options["output"])
Esempio n. 21
0
def main():
    check_addon_installed("r.object.geometry", fatal=True)

    dev_start = options["development_start"]
    dev_end = options["development_end"]
    only_file = flags["l"]
    nprocs = int(options["nprocs"])
    patches_per_subregion = flags["s"]
    if not only_file:
        repeat = int(options["repeat"])
        compactness_means = [
            float(each) for each in options["compactness_mean"].split(",")
        ]
        compactness_ranges = [
            float(each) for each in options["compactness_range"].split(",")
        ]
        discount_factors = [
            float(each) for each in options["discount_factor"].split(",")
        ]
    patches_file = options["patch_sizes"]
    threshold = float(options["patch_threshold"])
    sep = gutils.separator(options["separator"])
    # v.clean removes size <= threshold, we want to keep size == threshold
    threshold -= 1e-6

    # compute cell size
    region = gcore.region()
    res = (region["nsres"] + region["ewres"]) / 2.0
    coeff = float(gcore.parse_command("g.proj", flags="g")["meters"])
    cell_size = res * res * coeff * coeff
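    # e.g. a 30 m resolution in a metric projection (coeff = 1) gives
    # cell_size = 30 * 30 * 1 * 1 = 900 square meters per cell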

    tmp_name = "tmp_futures_calib_" + str(os.getpid()) + "_"
    global TMP

    orig_patch_diff = tmp_name + "orig_patch_diff"
    TMP.append(orig_patch_diff)
    tmp_clump = tmp_name + "tmp_clump"
    TMP.append(tmp_clump)
    if patches_per_subregion:
        tmp_cat_clump = tmp_name + "tmp_cat_clump"
        TMP.append(tmp_cat_clump)

    gcore.message(_("Analyzing original patches..."))
    diff_development(dev_start, dev_end, options["subregions"],
                     orig_patch_diff)
    data = write_data = patch_analysis(orig_patch_diff, threshold, tmp_clump)
    if patches_per_subregion:
        subregions_data = patch_analysis_per_subregion_parallel(
            orig_patch_diff,
            options["subregions"],
            threshold,
            tmp_clump,
            tmp_name,
            nprocs,
        )
        # if there is just one column, write the previous analysis result
        if len(subregions_data.keys()) > 1:
            write_data = subregions_data
    write_patches_file(write_data, cell_size, patches_file, sep)

    if only_file:
        return

    area, perimeter = data.T
    compact = compactness(area, perimeter)

    # area histogram
    area = area / cell_size
    # automatic ways to determine bin width do not perform well in this case
    bin_width = 1.0
    hist_bins_area_orig = int(np.ptp(area) / bin_width)
    hist_range_area_orig = (np.min(area), np.max(area))
    histogram_area_orig, _edges = np.histogram(area,
                                               bins=hist_bins_area_orig,
                                               range=hist_range_area_orig,
                                               density=True)
    histogram_area_orig = histogram_area_orig * 100  # to get percentage for readability
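    # with density=True the histogram integrates to 1 over the data range,
    # so the scaling above only makes the bin heights read as percentages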

    # compactness histogram
    bin_width = 0.1
    hist_bins_compactness_orig = int(np.ptp(compact) / bin_width)
    hist_range_compactness_orig = (np.min(compact), np.max(compact))
    histogram_compactness_orig, _edges = np.histogram(
        compact,
        bins=hist_bins_compactness_orig,
        range=hist_range_compactness_orig,
        density=True,
    )
    histogram_compactness_orig = histogram_compactness_orig * 100  # percentage for readability

    seed = int(options["random_seed"])
    count = 0
    proc_count = 0
    queue_list = []
    proc_list = []
    num_all = len(compactness_means) * len(compactness_ranges) * len(
        discount_factors)
    with open(options["calibration_results"], "w") as f:
        for com_mean in compactness_means:
            for com_range in compactness_ranges:
                for discount_factor in discount_factors:
                    count += 1
                    q = Queue()
                    p = Process(
                        target=run_one_combination,
                        args=(
                            count,
                            num_all,
                            repeat,
                            seed,
                            dev_start,
                            com_mean,
                            com_range,
                            discount_factor,
                            patches_file,
                            options,
                            threshold,
                            hist_bins_area_orig,
                            hist_range_area_orig,
                            hist_bins_compactness_orig,
                            hist_range_compactness_orig,
                            cell_size,
                            histogram_area_orig,
                            histogram_compactness_orig,
                            tmp_name,
                            q,
                        ),
                    )
                    p.start()
                    queue_list.append(q)
                    proc_list.append(p)
                    proc_count += 1
                    seed += 1
                    if proc_count == nprocs or count == num_all:
                        for i in range(proc_count):
                            proc_list[i].join()
                            data = queue_list[i].get()
                            if not data:
                                continue
                            f.write(",".join([
                                str(data["input_discount_factor"]),
                                str(data["area_distance"]),
                                str(data["input_compactness_mean"]),
                                str(data["input_compactness_range"]),
                                str(data["compactness_distance"]),
                            ]))
                            f.write("\n")
                        f.flush()
                        proc_count = 0
                        proc_list = []
                        queue_list = []
    # compute combined normalized error
    process_calibration(options["calibration_results"])