Example #1
0
def start_browser(entry):
    """Open the HTML manual page for *entry* in the configured browser.

    With flags['o'] the online manual on grass.osgeo.org is used,
    otherwise a local HTML file under GISBASE or GRASS_ADDON_BASE.
    On success os.execlp() replaces the current process.
    """
    if browser != 'xdg-open' and not grass.find_program(browser):
        grass.fatal(_("Browser '%s' not found") % browser)

    if flags['o']:
        major, minor, patch = grass.version()['version'].split('.')
        url_path = 'http://grass.osgeo.org/grass%s%s/manuals/%s.html' % (
            major, minor, entry)
        # fall back to the addons manual when the core page is missing
        if urllib.urlopen(url_path).getcode() != 200:
            url_path = 'http://grass.osgeo.org/grass%s%s/manuals/addons/%s.html' % (
                major, minor, entry)
    else:
        path = os.path.join(gisbase, 'docs', 'html', entry + '.html')
        if not os.path.exists(path) and os.getenv('GRASS_ADDON_BASE'):
            path = os.path.join(os.getenv('GRASS_ADDON_BASE'), 'docs', 'html',
                                entry + '.html')

        if not os.path.exists(path):
            grass.fatal(_("No HTML manual page entry for '%s'") % entry)

        url_path = 'file://' + path

    grass.verbose(
        _("Starting browser '%(browser)s' for manual"
          " entry '%(entry)s'...") % dict(browser=browser_name, entry=entry))

    try:
        os.execlp(browser, browser_name, url_path)
    except OSError:
        # report url_path: 'path' is unbound when the online-manual
        # branch (flags['o']) was taken, which made this handler raise
        # NameError instead of the intended fatal message
        grass.fatal(
            _("Error starting browser '%(browser)s' for HTML file"
              " '%(path)s'") % dict(browser=browser, path=url_path))
Example #2
0
def list_wxgui_extensions(print_module = True):
    """Return names of wxGUI extensions found in the GRASS-Addons SVN.

    :param print_module: when True, also print each extension name
    :return: list of extension names, or None when the fetch fails
    """
    found = []
    grass.debug('Fetching list of wxGUI extensions from GRASS-Addons SVN (be patient)...')
    link_re = re.compile(r'(<li><a href=".+">)(.+)(</a></li>)', re.IGNORECASE)
    grass.verbose(_("Checking for '%s' modules...") % 'gui/wxpython')

    url = '%s/%s' % (options['svnurl'], 'gui/wxpython')
    grass.debug("url = %s" % url, debug = 2)
    listing = urlopen(url)
    if not listing:
        grass.warning(_("Unable to fetch '%s'") % url)
        return

    for raw_line in listing.readlines():
        # each extension shows up as a directory link in the listing
        hit = link_re.search(raw_line)
        if hit is None:
            continue
        name = hit.group(2).rstrip('/')
        if name in ('..', 'Makefile'):
            continue
        if print_module:
            print(name)
        found.append(name)

    return found
Example #3
0
def list_available_extensions_svn():
    """Print extensions available in the GRASS-Addons SVN repository."""
    grass.message(_('Fetching list of extensions from GRASS-Addons SVN repository (be patient)...'))
    link_re = re.compile(r'(<li><a href=".+">)(.+)(</a></li>)', re.IGNORECASE)

    # without a metadata file these listing flags cannot be honoured
    if flags['c']:
        grass.warning(_("Flag 'c' ignored, metadata file not available"))
    if flags['g']:
        grass.warning(_("Flag 'g' ignored, metadata file not available"))

    for d in ('d', 'db', 'g', 'i', 'm', 'ps', 'p', 'r', 'r3', 's', 'v'):
        modclass = expand_module_class_name(d)
        grass.verbose(_("Checking for '%s' modules...") % modclass)

        url = '%s/%s' % (options['svnurl'], modclass)
        grass.debug("url = %s" % url, debug = 2)
        try:
            listing = urlopen(url, proxies=PROXIES)
        except HTTPError:
            grass.debug(_("Unable to fetch '%s'") % url, debug = 1)
            continue

        for raw_line in listing.readlines():
            hit = link_re.search(raw_line)
            if hit is None:
                continue
            name = hit.group(2).rstrip('/')
            # keep only modules whose prefix matches the class
            if name.split('.', 1)[0] == d:
                print(name)
Example #4
0
def main():
    """Rebuild topology for every vector map in the current mapset.

    Returns 0 on success, 1 when building at least one map failed.
    """
    mapset = grass.gisenv()['MAPSET']
    exit_code = 0

    vectors = grass.list_grouped('vect')[mapset]
    num_vectors = len(vectors)

    # suppress v.build output unless verbosity is raised
    quiet = grass.verbosity() < 2

    for idx, vect in enumerate(vectors, start=1):
        map = "%s@%s" % (vect, mapset)
        grass.message(
            _("%s\nBuilding topology for vector map <%s> (%d of %d)...\n%s") %
            ('-' * 80, map, idx, num_vectors, '-' * 80))
        grass.verbose(_("v.build map=%s") % map)
        try:
            grass.run_command("v.build", map=map, quiet=quiet)
        except CalledModuleError:
            grass.error(
                _("Building topology for vector map <%s> failed") % map)
            exit_code = 1

    return exit_code
Example #5
0
def list_wxgui_extensions(print_module = True):
    """Fetch wxGUI extension names from the GRASS-Addons SVN.

    Returns the list of names (optionally printing each one), or None
    when the repository listing could not be fetched.
    """
    grass.debug('Fetching list of wxGUI extensions from GRASS-Addons SVN (be patient)...')
    html_link = re.compile(r'(<li><a href=".+">)(.+)(</a></li>)', re.IGNORECASE)
    grass.verbose(_("Checking for '%s' modules...") % 'gui/wxpython')

    url = '%s/%s' % (options['svnurl'], 'gui/wxpython')
    grass.debug("url = %s" % url, debug = 2)
    response = urlopen(url)
    if not response:
        grass.warning(_("Unable to fetch '%s'") % url)
        return

    extensions = []
    for line in response.readlines():
        match = html_link.search(line)
        if not match:
            continue
        entry = match.group(2).rstrip('/')
        # skip the parent-directory link and the build file
        if entry not in ('..', 'Makefile'):
            if print_module:
                print(entry)
            extensions.append(entry)

    return extensions
Example #6
0
def main():
    """Rebuild topology for all vector maps in the current mapset.

    Returns 0 on success, 1 when building any map failed.
    Fixed: the body mixed tab and space indentation (a hard SyntaxError
    under Python 3); normalized to 4-space indents, logic unchanged.
    """
    env = grass.gisenv()
    mapset = env['MAPSET']
    ret = 0

    vectors = grass.list_grouped('vect')[mapset]
    num_vectors = len(vectors)

    # suppress v.build output unless verbosity is raised
    if grass.verbosity() < 2:
        quiet = True
    else:
        quiet = False

    i = 1
    for vect in vectors:
        map = "%s@%s" % (vect, mapset)
        grass.message(_("%s\nBuilding topology for vector map <%s> (%d of %d)...\n%s") %
                      ('-' * 80, map, i, num_vectors, '-' * 80))
        grass.verbose(_("v.build map=%s") % map)
        # this (older) API reports failure via a non-zero return code
        if grass.run_command("v.build", map = map, quiet = quiet) != 0:
            grass.error(_("Building topology for vector map <%s> failed") % map)
            ret = 1
        i += 1

    return ret
Example #7
0
def start_browser(entry):
    """Open the manual page for *entry* via the webbrowser module.

    With flags['o'] the online manual is used, otherwise a local HTML
    file under GISBASE or GRASS_ADDON_BASE.
    """
    if browser and \
       browser not in ('xdg-open', 'start') and \
       not grass.find_program(browser):
        grass.fatal(_("Browser '%s' not found") % browser)

    if flags['o']:
        major,minor,patch = grass.version()['version'].split('.')
        url_path = 'http://grass.osgeo.org/grass%s%s/manuals/%s.html' % (major,minor,entry)
        # fall back to the addons manual when the core page is missing
        if urllib.urlopen(url_path).getcode() != 200:
            url_path = 'http://grass.osgeo.org/grass%s%s/manuals/addons/%s.html' % (major,minor,entry)
    else:
        path = os.path.join(gisbase, 'docs', 'html', entry + '.html')
        if not os.path.exists(path) and os.getenv('GRASS_ADDON_BASE'):
            path = os.path.join(os.getenv('GRASS_ADDON_BASE'), 'docs', 'html', entry + '.html')

        if not os.path.exists(path):
            grass.fatal(_("No HTML manual page entry for '%s'") % entry)

        url_path = 'file://' + path

    if browser and browser not in ('xdg-open', 'start'):
        webbrowser.register(browser_name, None)

    grass.verbose(_("Starting browser '%(browser)s' for manual"
                    " entry '%(entry)s'...") % \
                  dict(browser=browser_name, entry=entry))

    try:
        webbrowser.open(url_path)
    except webbrowser.Error:
        # narrowed from a bare 'except:' (which also swallowed
        # SystemExit/KeyboardInterrupt); report url_path because 'path'
        # is unbound when the online branch (flags['o']) was taken
        grass.fatal(_("Error starting browser '%(browser)s' for HTML file"
                      " '%(path)s'") % dict(browser=browser, path=url_path))
    def GetData(self, idx, server, query, output):
        """Download one tile into *output*, trying POST first then GET.

        :param idx: tile index, used only for progress messages
        :param server: base server URL
        :param query: request payload (POST body or GET query string)
        :param output: destination file path

        Note: may flip self.flags['g'] to True as a persistent fallback
        for subsequent tiles when the POST download comes back empty.
        """
        grass.message(_("Downloading data (tile %d)...") % idx)
        grass.verbose("Requesting data: %s" % self.options['mapserver'])

        if not self.flags['g']: # -> post
            try:
                urllib.urlretrieve(server, output, data = query)
            except IOError:
                grass.fatal(_("Failed while downloading the data"))

            if not os.path.exists(output):
                grass.fatal(_("Failed while downloading the data"))

            # work-around for brain-dead ArcIMS servers which want POST-data as part of the GET URL
            #   (this is technically allowed by OGC WMS def v1.3.0 Sec6.3.4);
            # an empty download triggers the GET fallback below
            if os.path.getsize(output) == 0:
                grass.warning(_("Downloaded image file is empty -- trying another method"))
                self.flags['g'] = True

        if self.flags['g']: # -> get
            try:
                urllib.urlretrieve(server + '?' + query, output, data = None)
            except IOError:
                grass.fatal(_("Failed while downloading the data"))

            if not os.path.exists(output) or os.path.getsize(output) == 0:
                grass.fatal(_("Failed while downloading the data"))
Example #9
0
def cleanup():
    """Remove the temporary raster maps created by this module, if enabled."""
    if not CLEANUP:
        return
    gcore.verbose(_("Cleaning temporary maps..."))
    gcore.run_command("g.remove", flags="f", type="raster",
                      pattern=TMP_NAME + "*", quiet=True)
Example #10
0
def cleanup():
    """Delete the temporary TMP_NAME* rasters when cleanup is enabled."""
    if CLEANUP:
        gcore.verbose(_("Cleaning temporary maps..."))
        removal_args = dict(flags='f', type='raster',
                            pattern=TMP_NAME + "*", quiet=True)
        gcore.run_command('g.remove', **removal_args)
Example #11
0
 def GetTiles(self, requests):
     """Fetch every requested tile, skipping files already on disk."""
     grass.message(_("Downloading tiles..."))

     for idx, req in enumerate(requests):
         # a non-empty existing file counts as already downloaded
         already_there = os.path.exists(req['output']) and \
             os.path.getsize(req['output']) > 0
         if already_there:
             grass.verbose("Tile already downloaded")
         else:
             self.GetData(idx, req['server'], req['string'], req['output'])
Example #12
0
def remove_extension(force = False):
    """Uninstall the extension named in options['extension'].

    :param force: actually delete files; otherwise only list them

    Falls back to remove_extension_std() when no XML metadata exists.
    """
    # try to read XML metadata file first
    fXML = os.path.join(options['prefix'], 'modules.xml')
    name = options['extension']
    if name not in get_installed_extensions():
        grass.warning(_("Extension <%s> not found") % name)

    if force:
        grass.verbose(_("List of removed files:"))
    else:
        grass.info(_("Files to be removed (use flag 'f' to force removal):"))

    if os.path.exists(fXML):
        # close the handle promptly -- the original leaked it (and even
        # rebound the name 'f' to XML nodes in the loop below)
        with open(fXML, 'r') as fd:
            tree = etree.fromstring(fd.read())
        flist = []
        for task in tree.findall('task'):
            if name == task.get('name', default = '') and \
                    task.find('binary') is not None:
                for fnode in task.find('binary').findall('file'):
                    flist.append(fnode.text)

        if flist:
            removed = False
            err = list()
            for fpath in flist:
                try:
                    if force:
                        grass.verbose(fpath)
                        os.remove(fpath)
                        removed = True
                    else:
                        print(fpath)
                except OSError:
                    err.append((_("Unable to remove file '%s'") % fpath))
            if force and not removed:
                grass.fatal(_("Extension <%s> not found") % options['extension'])

            if err:
                for e in err:
                    grass.error(e)
        else:
            remove_extension_std(force)
    else:
        remove_extension_std(force)

    if force:
        grass.message(_("Updating metadata file..."))
        remove_extension_xml()
        grass.message(_("Extension <%s> successfully uninstalled.") % options['extension'])
    else:
        grass.warning(_("Extension <%s> not removed.\n"
                        "Re-run '%s' with 'f' flag to force removal") % (options['extension'], 'g.extension'))
Example #13
0
def remove_extension_std(name, force = False):
    """List or delete the standard install locations of extension *name*."""
    prefix = options['prefix']
    candidates = [
        os.path.join(prefix, 'bin', name),
        os.path.join(prefix, 'scripts', name),
        os.path.join(prefix, 'docs', 'html', name + '.html'),
        os.path.join(prefix, 'docs', 'rest', name + '.txt'),
        os.path.join(prefix, 'docs', 'man', 'man1', name + '.1'),
    ]
    for fpath in candidates:
        if not os.path.isfile(fpath):
            continue
        if force:
            grass.verbose(fpath)
            os.remove(fpath)
        else:
            print(fpath)
Example #14
0
def remove_extension(force = False):
    """Uninstall (or just list the files of) options['extension'].

    :param force: actually delete files; otherwise only list them
    """
    # try to read XML metadata file first
    fXML = os.path.join(options['prefix'], 'modules.xml')
    name = options['extension']
    if name not in get_installed_extensions():
        grass.warning(_("Extension <%s> not found") % name)

    if force:
        grass.verbose(_("List of removed files:"))
    else:
        grass.info(_("Files to be removed (use flag 'f' to force removal):"))

    if os.path.exists(fXML):
        # 'with' fixes the leaked handle; the original never closed the
        # file and reused the name 'f' for XML nodes below
        with open(fXML, 'r') as fd:
            tree = etree.fromstring(fd.read())
        flist = []
        for task in tree.findall('task'):
            if name == task.get('name', default = '') and \
                    task.find('binary') is not None:
                for fnode in task.find('binary').findall('file'):
                    flist.append(fnode.text)

        if flist:
            removed = False
            err = list()
            for fpath in flist:
                try:
                    if force:
                        grass.verbose(fpath)
                        os.remove(fpath)
                        removed = True
                    else:
                        print(fpath)
                except OSError:
                    err.append((_("Unable to remove file '%s'") % fpath))
            if force and not removed:
                grass.fatal(_("Extension <%s> not found") % options['extension'])

            if err:
                for e in err:
                    grass.error(e)
        else:
            remove_extension_std(force)
    else:
        remove_extension_std(force)

    if force:
        grass.message(_("Updating metadata file..."))
        remove_extension_xml()
        grass.message(_("Extension <%s> successfully uninstalled.") % options['extension'])
    else:
        grass.warning(_("Extension <%s> not removed.\n"
                        "Re-run '%s' with 'f' flag to force removal") % (options['extension'], 'g.extension'))
Example #15
0
def remove_modules(mlist, force = False):
    """List or delete installed files of every module in *mlist*.

    :param mlist: module names to remove
    :param force: actually delete files instead of only listing them
    """
    # try to read XML metadata file first
    fXML = os.path.join(options['prefix'], 'modules.xml')
    installed = get_installed_modules()

    if os.path.exists(fXML):
        f = open(fXML, 'r')
        tree = etree.fromstring(f.read())
        f.close()
    else:
        tree = None

    for name in mlist:
        if name not in installed:
            # try even if module does not seem to be available,
            # as the user may be trying to get rid of left over cruft
            grass.warning(_("Extension <%s> not found") % name)

        if tree is not None:
            flist = []
            for task in tree.findall('task'):
                if name == task.get('name') and \
                        task.find('binary') is not None:
                    for fnode in task.find('binary').findall('file'):
                        flist.append(fnode.text)
                    break

            if flist:
                removed = False
                err = list()
                for fpath in flist:
                    try:
                        if force:
                            grass.verbose(fpath)
                            # delete first, flag success after: the old
                            # order marked 'removed' even when os.remove
                            # raised OSError for every file
                            os.remove(fpath)
                            removed = True
                        else:
                            print(fpath)
                    except OSError:
                        err.append((_("Unable to remove file '%s'") % fpath))
                if force and not removed:
                    grass.fatal(_("Extension <%s> not found") % name)

                if err:
                    for e in err:
                        grass.error(e)
            else:
                remove_extension_std(name, force)
        else:
            remove_extension_std(name, force)
Example #16
0
def remove_extension_std(force = False):
    """List or delete the standard install files of options['extension']."""
    # try even if module does not seem to be available,
    # as the user may be trying to get rid of left over cruft
    prefix = options['prefix']
    ext = options['extension']
    for fpath in (os.path.join(prefix, ext),
                  os.path.join(prefix, 'bin', ext),
                  os.path.join(prefix, 'scripts', ext),
                  os.path.join(prefix, 'docs', 'html', ext + '.html'),
                  os.path.join(prefix, 'docs', 'man', 'man1', ext + '.1'),
                  os.path.join(prefix, 'man', 'man1', ext + '.1')):
        if os.path.isfile(fpath):
            if force:
                grass.verbose(fpath)
                os.remove(fpath)
            else:
                print(fpath)
Example #17
0
def start_browser(entry):
    """Open the local HTML manual page for *entry* in the browser.

    Replaces the current process via os.execlp() on success.
    """
    if browser != "xdg-open" and not grass.find_program(browser):
        grass.fatal(_("Browser <%s> not found") % browser)

    path = os.path.join(gisbase, "docs", "html", entry + ".html")
    if not os.path.exists(path) and os.getenv("GRASS_ADDON_PATH"):
        path = os.path.join(os.getenv("GRASS_ADDON_PATH"), "docs", "html", entry + ".html")

    if not os.path.exists(path):
        grass.fatal(_("No HTML manual page entry for <%s>") % entry)

    grass.verbose(_("Starting browser <%s> for module %s...") % (browser_name, entry))

    try:
        # os.execlp() never returns normally: on success it replaces
        # this process, on failure it raises OSError -- so the fatal
        # call must live in the handler (it was unreachable before)
        os.execlp(browser, browser_name, "file://%s" % (path))
    except OSError:
        grass.fatal(_("Error starting browser <%s> for HTML file <%s>") % (browser, entry))
Example #18
0
def remove_extension_std(force = False):
    """Remove (or only list) the extension's files at its standard paths."""
    # attempt removal even when the module looks absent -- the user may
    # be cleaning up left-over cruft
    pfx = options['prefix']
    name = options['extension']
    targets = [os.path.join(pfx, name),
               os.path.join(pfx, 'bin', name),
               os.path.join(pfx, 'scripts', name),
               os.path.join(pfx, 'docs', 'html', name + '.html'),
               os.path.join(pfx, 'docs', 'man', 'man1', name + '.1'),
               os.path.join(pfx, 'man', 'man1', name + '.1')]
    for fpath in targets:
        if not os.path.isfile(fpath):
            continue
        if force:
            grass.verbose(fpath)
            os.remove(fpath)
        else:
            print(fpath)
Example #19
0
def start_browser(entry):
    """Open the manual page for *entry* in a web browser.

    With flags['o'] the online manual is used, otherwise a local HTML
    file under GISBASE or GRASS_ADDON_BASE.
    """
    if (
        browser
        and browser not in ("xdg-open", "start")
        and not grass.find_program(browser)
    ):
        grass.fatal(_("Browser '%s' not found") % browser)

    if flags["o"]:
        major, minor, patch = grass.version()["version"].split(".")
        url_path = "https://grass.osgeo.org/grass%s%s/manuals/%s.html" % (
            major,
            minor,
            entry,
        )
        # fall back to the addons manual when the core page is missing
        if urlopen(url_path).getcode() != 200:
            url_path = "https://grass.osgeo.org/grass%s%s/manuals/addons/%s.html" % (
                major,
                minor,
                entry,
            )
    else:
        path = os.path.join(gisbase, "docs", "html", entry + ".html")
        if not os.path.exists(path) and os.getenv("GRASS_ADDON_BASE"):
            path = os.path.join(
                os.getenv("GRASS_ADDON_BASE"), "docs", "html", entry + ".html"
            )

        if not os.path.exists(path):
            grass.fatal(_("No HTML manual page entry for '%s'") % entry)

        url_path = "file://" + path

    if browser and browser not in ("xdg-open", "start"):
        webbrowser.register(browser_name, None)

    grass.verbose(
        _("Starting browser '%(browser)s' for manual" " entry '%(entry)s'...")
        % dict(browser=browser_name, entry=entry)
    )

    try:
        webbrowser.open(url_path)
    except webbrowser.Error:
        # narrowed from a bare 'except:' (which also caught SystemExit /
        # KeyboardInterrupt); report url_path because 'path' is unbound
        # when the online branch (flags['o']) was taken
        grass.fatal(
            _("Error starting browser '%(browser)s' for HTML file" " '%(path)s'")
            % dict(browser=browser, path=url_path)
        )
Example #20
0
def update_manual_page(module):
    """Rewrite relative manual-page links of *module* to GISBASE file:// URIs.

    Links that point to http:// pages or to installed addons are left
    untouched.
    """
    if module.split('.', 1)[0] == 'wx':
        return # skip for GUI modules

    grass.verbose(_("Manual page for <%s> updated") % module)
    # read original html file; 'with' guarantees the handle is closed
    # even when read() itself raises
    htmlfile = os.path.join(options['prefix'], 'docs', 'html', module + '.html')
    try:
        with open(htmlfile) as f:
            shtml = f.read()
    except IOError as e:
        grass.fatal(_("Unable to read manual page: %s") % e)

    # find URIs of core (non-http, non-addon) pages
    pattern = r'''<a href="([^"]+)">([^>]+)</a>'''
    addons = get_installed_extensions(force = True)
    pos = []
    for match in re.finditer(pattern, shtml):
        if match.group(1)[:7] == 'http://':
            continue
        if match.group(1).replace('.html', '') in addons:
            continue
        pos.append(match.start(1))

    if not pos:
        return # no match

    # replace file URIs: prepend the GISBASE docs prefix at every
    # recorded href start position
    prefix = 'file://' + '/'.join([os.getenv('GISBASE'), 'docs', 'html'])
    ohtml = shtml[:pos[0]]
    for i in range(1, len(pos)):
        ohtml += prefix + '/' + shtml[pos[i-1]:pos[i]]
    ohtml += prefix + '/' + shtml[pos[-1]:]

    # write updated html file ("Unable for write" typo fixed)
    try:
        with open(htmlfile, 'w') as f:
            f.write(ohtml)
    except IOError as e:
        grass.fatal(_("Unable to write manual page: %s") % e)
Example #21
0
def main():
    """Compute the Optimum Index Factor for all 3-band combinations.

    Writes the ranked results both to stdout and to 'i.oif.result'.
    Fixed: tab-indented body (SyntaxError when mixed with spaces under
    Python 3) and the py3-removed file() builtin.
    """
    shell = flags['g']
    image = {}
    for band in bands:
        image[band] = options['image%d' % band]

    # calculate the Stddev for TM bands
    grass.message(_("Calculating Standard deviations for all bands..."))
    stddev = {}
    for band in bands:
        grass.verbose("band %d" % band)
        s = grass.read_command('r.univar', flags = 'g', map = image[band])
        kv = grass.parse_key_val(s)
        stddev[band] = float(kv['stddev'])

    grass.message(_("Calculating Correlation Matrix..."))
    correlation = {}
    s = grass.read_command('r.covar', flags = 'r', map = [image[band] for band in bands])
    for i, row in zip(bands, s.splitlines()):
        for j, cell in zip(bands, row.split(' ')):
            correlation[i, j] = float(cell)

    # Calculate all combinations
    grass.message(_("Calculating OIF for the 20 band combinations..."))

    oif = []
    for p in perms():
        oif.append((oifcalc(stddev, correlation, *p), p))
    oif.sort(reverse = True)

    grass.verbose(_("The Optimum Index Factor analysis result "
                    "(Best combination comes first):"))

    if shell:
        fmt = "%d%d%d:%f\n"
    else:
        fmt = "%d%d%d:  %f\n"

    # open() replaces the py3-removed file() builtin; 'with' closes it
    with open('i.oif.result', 'w') as outf:
        for v, p in oif:
            sys.stdout.write(fmt % (p + (v,)))
            outf.write(fmt % (p + (v,)))
Example #22
0
def remove_extension(force = False):
    """Uninstall the requested extension (or a whole toolbox with flag 't')."""
    if flags['t']:
        mlist = get_toolbox_modules(options['extension'])
    else:
        mlist = [options['extension']]

    if force:
        grass.verbose(_("List of removed files:"))
    else:
        grass.info(_("Files to be removed (use flag 'f' to force removal):"))

    remove_modules(mlist, force)

    if not force:
        grass.warning(_("Extension <%s> not removed.\n"
                        "Re-run '%s' with 'f' flag to force removal") % (options['extension'], 'g.extension'))
        return

    grass.message(_("Updating metadata file..."))
    remove_extension_xml(mlist)
    grass.message(_("Extension <%s> successfully uninstalled.") % options['extension'])
Example #23
0
def list_available_modules():
    """Print and return all addon modules available in the Addons SVN."""
    mlist = list()
    grass.message(_('Fetching list of modules from GRASS-Addons SVN (be patient)...'))
    pattern = re.compile(r'(<li><a href=".+">)(.+)(</a></li>)', re.IGNORECASE)
    i = 0
    prefix = ['d', 'db', 'g', 'i', 'm', 'ps',
              'p', 'r', 'r3', 's', 'v']
    nprefix = len(prefix)
    for d in prefix:
        if flags['g']:
            grass.percent(i, nprefix, 1)
            i += 1

        modclass = expand_module_class_name(d)
        grass.verbose(_("Checking for '%s' modules...") % modclass)

        url = '%s/%s' % (options['svnurl'], modclass)
        grass.debug("url = %s" % url, debug = 2)
        f = urllib.urlopen(url)
        if not f:
            grass.warning(_("Unable to fetch '%s'") % url)
            continue

        for line in f.readlines():
            # list modules
            sline = pattern.search(line)
            if not sline:
                continue
            name = sline.group(2).rstrip('/')
            if name.split('.', 1)[0] == d:
                print_module_desc(name, url)
                mlist.append(name)

    # list_wxgui_extensions() returns None when the listing cannot be
    # fetched; guard so 'mlist += None' cannot raise TypeError
    wx_extensions = list_wxgui_extensions()
    if wx_extensions:
        mlist += wx_extensions

    if flags['g']:
        grass.percent(1, 1, 1)

    return mlist
Example #24
0
def read_map(mapname, scalefactor=1.0):
    """Return numpy array from a GRASS raster map."""
    # show which map is processed if verbose
    grass.verbose(mapname)

    # optional smoothing radius taken from the module options
    smooth = options['smooth']

    if smooth:
        # pre-smooth into a temporary map with r.neighbors
        tmpmap = 'r.out.pism_' + str(os.getpid()) + '_tmp'
        grass.run_command('r.neighbors', flags='c',
                          input=mapname, output=tmpmap,
                          size=options['smooth'], quiet=True)
        mapname = tmpmap

    # read the (possibly smoothed) map into an array
    data = garray.array()
    data.read(mapname)
    if smooth:
        grass.run_command('g.remove', rast=tmpmap, quiet=True)
    return transpose(flipud(data[:])) * scalefactor
Example #25
0
def main():
    """Convert coordinates between projections by piping them through cs2cs.

    Reads x,y[,z] records from a file or stdin, feeds them to the PROJ
    cs2cs tool, and writes the reprojected coordinates.  Fixed: the
    py3-removed file() builtin and the "Choise" typo in a fatal message;
    stray comment indentation normalized.
    """
    input = options["input"]
    output = options["output"]
    fs = options["fs"]
    proj_in = options["proj_input"]
    proj_out = options["proj_output"]
    ll_in = flags["i"]
    ll_out = flags["o"]
    decimal = flags["d"]
    copy_input = flags["e"]
    include_header = flags["c"]

    #### check for cs2cs
    if not grass.find_program("cs2cs"):
        grass.fatal(_("cs2cs program not found, install PROJ.4 first: http://proj.maptools.org"))

    #### check for overenthusiasm
    if proj_in and ll_in:
        grass.fatal(_("Choose only one input parameter method"))

    if proj_out and ll_out:
        grass.fatal(_("Choose only one output parameter method"))

    if ll_in and ll_out:
        grass.fatal(_("Choose only one auto-projection parameter method"))

    if output and not grass.overwrite() and os.path.exists(output):
        grass.fatal(_("Output file already exists"))

    #### parse field separator
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ",":
        ifs = ofs = ","
    else:
        try:
            ifs, ofs = fs.split(",")
        except ValueError:
            ifs = ofs = fs

    ifs = ifs.lower()
    ofs = ofs.lower()

    # NOTE(review): 'tab' maps to a space for the *input* separator --
    # presumably because the input side is whitespace-split anyway;
    # confirm before changing
    if ifs in ("space", "tab"):
        ifs = " "
    elif ifs == "comma":
        ifs = ","
    else:
        if len(ifs) > 1:
            grass.warning(_("Invalid field separator, using '%s'") % ifs[0])
        try:
            ifs = ifs[0]
        except IndexError:
            grass.fatal(_("Invalid field separator '%s'") % ifs)

    if ofs.lower() == "space":
        ofs = " "
    elif ofs.lower() == "tab":
        ofs = "\t"
    elif ofs.lower() == "comma":
        ofs = ","
    else:
        if len(ofs) > 1:
            grass.warning(_("Invalid field separator, using '%s'") % ofs[0])
        try:
            ofs = ofs[0]
        except IndexError:
            grass.fatal(_("Invalid field separator '%s'") % ifs)

    #### set up projection params
    s = grass.read_command("g.proj", flags="j")
    kv = grass.parse_key_val(s)
    if "XY location" in kv["+proj"] and (ll_in or ll_out):
        grass.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        grass.verbose("Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = grass.read_command("g.proj", flags="jf")

    if proj_in:
        in_proj = proj_in

    if not in_proj:
        grass.verbose("Assuming current location as input")
        in_proj = grass.read_command("g.proj", flags="jf")

    in_proj = in_proj.strip()
    grass.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        grass.verbose("Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
        out_proj = grass.read_command("g.proj", flags="jf")

    if proj_out:
        out_proj = proj_out

    if not out_proj:
        grass.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    grass.verbose("Output parameters: '%s'" % out_proj)

    #### set up input file
    if input == "-":
        infile = None
        inf = sys.stdin
    else:
        infile = input
        if not os.path.exists(infile):
            grass.fatal(_("Unable to read input data"))
        # open() replaces the py3-removed file() builtin; the handle is
        # consumed (and exhausted) by TrThread below, so no 'with' here
        inf = open(infile)
        grass.debug("input file=[%s]" % infile)

    #### set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, "w")
        grass.debug("output file=[%s]" % outfile)

    #### set up output style
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    #### do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'
    cmd = ["cs2cs"] + copyinp + outfmt + in_proj.split() + ["+to"] + out_proj.split()
    p = grass.Popen(cmd, stdin=grass.PIPE, stdout=grass.PIPE)

    # feed cs2cs from a separate thread so reading its stdout here
    # cannot deadlock on full pipe buffers
    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            xy, z = line.split(" ", 1)
            x, y = xy.split("\t")
            outf.write("%s%s%s%s%s\n" % (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = line.split("\t")
            inX, inY = inXYZ.split(" ")[:2]
            y, z = rest.split(" ", 1)
            outf.write(
                "%s%s%s%s%s%s%s%s%s\n" % (inX.strip(), ofs, inY.strip(), ofs, x.strip(), ofs, y.strip(), ofs, z.strip())
            )

    p.wait()

    if p.returncode != 0:
        grass.warning(_("Projection transform probably failed, please investigate"))
Example #26
0
def main():
    """Pack a raster map (plus any virtual-map components) into a .pack archive.

    Module state comes from the global ``options``/``flags`` dicts filled
    in by the GRASS parser.  All raster support elements and the location's
    projection files are copied into a temporary directory, tarred
    (optionally gzipped), and moved to the requested output path.
    """
    infile = options["input"]
    compression_off = flags["c"]
    mapset = None
    # Accept fully-qualified map names ("name@mapset").
    if "@" in infile:
        infile, mapset = infile.split("@")

    # Default output name: <input>.pack in the current directory.
    if options["output"]:
        outfile_path, outfile_base = os.path.split(
            os.path.abspath(options["output"]))
    else:
        outfile_path, outfile_base = os.path.split(
            os.path.abspath(infile + ".pack"))

    outfile = os.path.join(outfile_path, outfile_base)

    # 'tmp' is global so the module's cleanup handler can remove it.
    global tmp
    tmp = grass.tempdir()
    tmp_dir = os.path.join(tmp, infile)
    os.mkdir(tmp_dir)
    grass.debug("tmp_dir = %s" % tmp_dir)

    gfile = grass.find_file(name=infile, element="cell", mapset=mapset)
    if not gfile["name"]:
        grass.fatal(_("Raster map <%s> not found") % infile)

    # Honour --overwrite via the environment; otherwise refuse to clobber.
    if os.path.exists(outfile):
        if os.getenv("GRASS_OVERWRITE"):
            grass.warning(
                _("Pack file <%s> already exists and will be overwritten") %
                outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <output>: <%s> exists.") % outfile)

    grass.message(_("Packing <%s> to <%s>...") % (gfile["fullname"], outfile))
    # Strip the trailing "<element>/<name>" components to get the mapset dir.
    basedir = os.path.sep.join(
        os.path.normpath(gfile["file"]).split(os.path.sep)[:-2])
    olddir = os.getcwd()

    # copy elements
    info = grass.parse_command("r.info", flags="e", map=infile)
    vrt_files = {}
    # Virtual rasters list their underlying maps in cell_misc/<name>/vrt;
    # those maps must be packed too or the archive is unusable.
    if info["maptype"] == "virtual":
        map_file = grass.find_file(
            name=infile,
            element="cell_misc",
        )
        if map_file["file"]:
            vrt = os.path.join(map_file["file"], "vrt")
            if os.path.exists(vrt):
                with open(vrt, "r") as f:
                    for r in f.readlines():
                        # NOTE(review): 'r' keeps its trailing newline, so
                        # 'mapset' (which this clobbers) may contain '\n';
                        # it is not used afterwards — confirm intentional.
                        map, mapset = r.split("@")
                        map_basedir = os.path.sep.join(
                            os.path.normpath(map_file["file"], ).split(
                                os.path.sep)[:-2], )
                        vrt_files[map] = map_basedir

    for element in [
            "cats",
            "cell",
            "cellhd",
            "cell_misc",
            "colr",
            "fcell",
            "hist",
    ]:
        path = os.path.join(basedir, element, infile)
        if os.path.exists(path):
            grass.debug("copying %s" % path)
            # Support elements may be single files or directories.
            if os.path.isfile(path):
                shutil.copyfile(
                    path,
                    os.path.join(tmp_dir, element),
                )
            else:
                shutil.copytree(
                    path,
                    os.path.join(tmp_dir, element),
                )

        # Copy vrt files
        if vrt_files:
            for f in vrt_files.keys():
                f_tmp_dir = os.path.join(tmp, f)
                if not os.path.exists(f_tmp_dir):
                    os.mkdir(f_tmp_dir)
                path = os.path.join(vrt_files[f], element, f)
                if os.path.exists(path):
                    grass.debug("copying vrt file {}".format(path))
                    if os.path.isfile(path):
                        shutil.copyfile(
                            path,
                            os.path.join(f_tmp_dir, element),
                        )
                    else:
                        shutil.copytree(
                            path,
                            os.path.join(f_tmp_dir, element),
                        )

    if not os.listdir(tmp_dir):
        grass.fatal(_("No raster map components found"))

    # copy projection info
    # (would prefer to use g.proj*, but this way is 5.3 and 5.7 compat)
    gisenv = grass.gisenv()
    for support in ["INFO", "UNITS", "EPSG"]:
        path = os.path.join(gisenv["GISDBASE"], gisenv["LOCATION_NAME"],
                            "PERMANENT", "PROJ_" + support)
        if os.path.exists(path):
            shutil.copyfile(path, os.path.join(tmp_dir, "PROJ_" + support))

    # pack it all up
    # Archive paths are relative to tmp, so chdir there first.
    os.chdir(tmp)
    if compression_off:
        tar = tarfile.TarFile.open(name=outfile_base, mode="w:")
    else:
        tar = tarfile.TarFile.open(name=outfile_base, mode="w:gz")
    tar.add(infile, recursive=True)
    if vrt_files:
        for f in vrt_files.keys():
            tar.add(f, recursive=True)

    tar.close()
    try:
        # Move (not copy) so the temp copy is not left behind.
        shutil.move(outfile_base, outfile)
    except shutil.Error as e:
        grass.fatal(e)

    os.chdir(olddir)

    grass.verbose(_("Raster map saved to '%s'" % outfile))
Exemple #27
0
def main():
    """Import a 3D point cloud into a 3D raster by horizontal slicing.

    For each depth level of the current 3D region an ``r.in.xyz`` job is
    spawned (up to ``workers`` in parallel) with a z-range filter; the
    resulting 2D slices are then assembled into a 3D raster with
    ``r.to.rast3``.  Module state comes from the global ``options`` and
    ``flags`` dicts filled in by the GRASS parser.
    """
    infile = options['input']
    output = options['output']
    method = options['method']
    dtype = options['type']
    fs = options['separator']
    x = options['x']
    y = options['y']
    z = options['z']
    value_column = options['value_column']
    vrange = options['vrange']
    vscale = options['vscale']
    percent = options['percent']
    pth = options['pth']
    trim = options['trim']
    workers = int(options['workers'])
    scan_only = flags['s']
    shell_style = flags['g']
    ignore_broken = flags['i']

    # Allow overriding parallelism via the WORKERS environment variable.
    if workers == 1 and "WORKERS" in os.environ:
        workers = int(os.environ["WORKERS"])

    if not os.path.exists(infile):
        grass.fatal(_("Unable to read input file <%s>") % infile)

    # Collect only the options the user actually set, so r.in.xyz
    # falls back to its own defaults for the rest.
    addl_opts = {}
    if pth:
        addl_opts['pth'] = '%s' % pth
    if trim:
        addl_opts['trim'] = '%s' % trim
    if value_column:
        addl_opts['value_column'] = '%s' % value_column
    if vrange:
        addl_opts['vrange'] = '%s' % vrange
    if vscale:
        addl_opts['vscale'] = '%s' % vscale
    if ignore_broken:
        addl_opts['flags'] = 'i'

    # Scan-only mode: delegate entirely to r.in.xyz and exit.
    if scan_only or shell_style:
        if shell_style:
            doShell = 'g'
        else:
            doShell = ''
        grass.run_command('r.in.xyz', flags='s' + doShell, input=infile,
                          output='dummy', sep=fs, x=x, y=y, z=z,
                          **addl_opts)
        sys.exit()

    if dtype == 'float':
        data_type = 'FCELL'
    else:
        data_type = 'DCELL'

    region = grass.region(region3d=True)

    # The 2D and 3D resolutions must agree for the slices to stack.
    if region['nsres'] != region['nsres3'] or region['ewres'] != region['ewres3']:
        grass.run_command('g.region', flags='3p')
        grass.fatal(_("The 2D and 3D region settings are different. Can not continue."))

    grass.verbose(_("Region bottom=%.15g  top=%.15g  vertical_cell_res=%.15g  (%d depths)")
                  % (region['b'], region['t'], region['tbres'], region['depths']))

    grass.verbose(_("Creating slices ..."))

    # To avoid a point which falls exactly on a slice's top bound from
    # being counted in two slices, shrink each slice's upper bound by eps.
    # The very top slice keeps its exact bound, as someone scanning the
    # bounds may have set them exactly to the data extent (a bad idea,
    # but it happens..)
    eps = 1.0e-15

    # if there are thousands of depths hopefully this array doesn't get too
    # large and so we don't have to worry much about storing/looping through
    # all the finished process infos.
    proc = {}
    pout = {}

    depths = list(range(1, 1 + region['depths']))

    for i in depths:
        tmp_layer_name = 'tmp.r3xyz.%d.%s' % (os.getpid(), '%05d' % i)

        zrange_min = region['b'] + (region['tbres'] * (i - 1))

        if i < region['depths']:
            zrange_max = region['b'] + (region['tbres'] * i) - eps
        else:
            zrange_max = region['b'] + (region['tbres'] * i)

        # spawn depth layer import job in the background
        grass.message(_("Processing horizontal slice %d of %d [%.15g,%.15g) ...")
                      % (i, region['depths'], zrange_min, zrange_max))

        proc[i] = grass.start_command('r.in.xyz', input=infile, output=tmp_layer_name,
                                      sep=fs, method=method, x=x, y=y, z=z,
                                      percent=percent, type=data_type,
                                      zrange='%.15g,%.15g' % (zrange_min, zrange_max),
                                      **addl_opts)

        grass.debug("i=%d, %%=%d  (workers=%d)" % (i, i % workers, workers))

        if i % workers == 0:
            # wait for the ones launched so far to finish
            # (re-waiting already-finished jobs is redundant but harmless:
            # communicate()/wait() on a finished process return immediately)
            for p_i in depths[:i]:
                pout[p_i] = proc[p_i].communicate()[0]
                if proc[p_i].wait() != 0:
                    grass.fatal(_("Trouble importing data. Aborting."))

    # wait for jobs to finish, collect any stray output
    for i in depths:
        pout[i] = proc[i].communicate()[0]
        if proc[i].wait() != 0:
            grass.fatal(_("Trouble importing data. Aborting."))

    del proc

    grass.verbose(_("Assembling 3D cube ..."))

    # input order: lower most strata first
    slices = grass.read_command('g.list', type='raster', sep=',',
                                pattern='tmp.r3xyz.%d.*' % os.getpid()).rstrip(os.linesep)
    grass.debug(slices)

    # Bug fix: the success message used to be printed inside the except
    # branch (i.e. only when r.to.rast3 FAILED) and failures were never
    # reported.  Report failure fatally; announce success afterwards.
    try:
        grass.run_command('r.to.rast3', input=slices, output=output)
    except CalledModuleError:
        grass.fatal(_("Unable to assemble 3D raster map <%s>") % output)

    grass.message(_("Done. 3D raster map <%s> created.") % output)
Exemple #28
0
def main():
    """Convert coordinates between projections by piping them through cs2cs.

    Coordinates come from <coordinates>, a file, or stdin; results go to
    stdout or <output>.  Module state comes from the global ``options``
    and ``flags`` dicts filled in by the GRASS parser.

    Fixes over the previous revision: consistent 4-space indentation
    (the old body mixed tabs and spaces, a TabError under Python 3),
    ``open()`` instead of the Python-2-only ``file()``, and a typo in
    one error message ("Choise" -> "Choose").
    """
    coords = options['coordinates']
    input = options['input']
    output = options['output']
    fs = options['separator']
    proj_in = options['proj_in']
    proj_out = options['proj_out']
    ll_in = flags['i']
    ll_out = flags['o']
    decimal = flags['d']
    copy_input = flags['e']
    include_header = flags['c']

    # cs2cs (PROJ) does the actual transformation work
    if not grass.find_program('cs2cs'):
        grass.fatal(_("cs2cs program not found, install PROJ.4 first: http://proj.maptools.org"))

    # the projection on each side may be given only one way
    if proj_in and ll_in:
        grass.fatal(_("Choose only one input parameter method"))

    if proj_out and ll_out:
        grass.fatal(_("Choose only one output parameter method"))

    if ll_in and ll_out:
        grass.fatal(_("Choose only one auto-projection parameter method"))

    if output and not grass.overwrite() and os.path.exists(output):
        grass.fatal(_("Output file already exists"))

    if not coords and not input:
        grass.fatal(_("One of <coordinates> and <input> must be given"))
    if coords and input:
        grass.fatal(_("Options <coordinates> and <input> are mutually exclusive"))

    # parse field separator: "in,out" selects distinct input/output separators
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ',':
        ifs = ofs = ','
    else:
        try:
            ifs, ofs = fs.split(',')
        except ValueError:
            ifs = ofs = fs

    ifs = separator(ifs)
    ofs = separator(ofs)

    # set up projection params
    s = grass.read_command("g.proj", flags='j')
    kv = parse_key_val(s)
    if "XY location" in kv['+proj'] and (ll_in or ll_out):
        grass.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        grass.verbose("Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = grass.read_command('g.proj', flags='jf')

    if proj_in:
        in_proj = proj_in

    if not in_proj:
        grass.verbose("Assuming current location as input")
        in_proj = grass.read_command('g.proj', flags='jf')

    in_proj = in_proj.strip()
    grass.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        grass.verbose("Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
        out_proj = grass.read_command('g.proj', flags='jf')

    if proj_out:
        out_proj = proj_out

    if not out_proj:
        grass.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    grass.verbose("Output parameters: '%s'" % out_proj)

    # set up input file
    if coords:
        # a single x,y pair given on the command line: stage it in a temp file
        x, y = coords.split(',')
        tmpfile = grass.tempfile()
        fd = open(tmpfile, "w")
        fd.write("%s%s%s\n" % (x, ifs, y))
        fd.close()
        inf = open(tmpfile)
    else:
        if input == '-':
            infile = None
            inf = sys.stdin
        else:
            infile = input
            if not os.path.exists(infile):
                grass.fatal(_("Unable to read input data"))
            inf = open(infile)
            grass.debug("input file=[%s]" % infile)

    # set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, 'w')
        grass.debug("output file=[%s]" % outfile)

    # set up output style: -w5 DMS output unless decimal degrees requested
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    # do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'

    cmd = ['cs2cs'] + copyinp + outfmt + in_proj.split() + ['+to'] + out_proj.split()
    p = grass.Popen(cmd, stdin=grass.PIPE, stdout=grass.PIPE)

    # feed input coordinates to cs2cs from a separate thread to avoid
    # deadlocking on the pipe buffers
    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            try:
                xy, z = line.split(' ', 1)
                x, y = xy.split('\t')
            except ValueError:
                grass.fatal(line)

            outf.write('%s%s%s%s%s\n' %
                       (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = line.split('\t')
            inX, inY = inXYZ.split(' ')[:2]
            y, z = rest.split(' ', 1)
            outf.write('%s%s%s%s%s%s%s%s%s\n' %
                       (inX.strip(), ofs, inY.strip(), ofs, x.strip(), ofs,
                        y.strip(), ofs, z.strip()))

    p.wait()

    if p.returncode != 0:
        grass.warning(_("Projection transform probably failed, please investigate"))
Exemple #29
0
def main():
    """Compute and print a tiling of the current region in a source projection.

    Projects the destination region into the source SRS, sizes tiles so no
    tile exceeds maxcols x maxrows (minus overlap), and prints one line per
    tile that intersects the destination region.  Relies on the helpers
    bboxToPoints, projectPoints, pointsToBbox, sideLengths and
    bboxesIntersect defined elsewhere in this file; module state comes from
    the global ``options``/``flags`` dicts.
    """
    # Take into account those extra pixels we'll be a addin'
    max_cols = int(options['maxcols']) - int(options['overlap'])
    max_rows = int(options['maxrows']) - int(options['overlap'])

    # NOTE(review): here and below, the '%' interpolation happens INSIDE
    # _(), so the translation catalog is queried with the already-formatted
    # string and lookup will miss — consider _("...") % (...) instead.
    if max_cols == 0:
        gcore.fatal(
            _("It is not possible to set 'maxcols=%s' and "
              "'overlap=%s'. Please set maxcols>overlap" %
              (options['maxcols'], options['overlap'])))
    elif max_rows == 0:
        gcore.fatal(
            _("It is not possible to set 'maxrows=%s' and "
              "'overlap=%s'. Please set maxrows>overlap" %
              (options['maxrows'], options['overlap'])))
    # destination projection: explicit option or the current location's
    if not options['destproj']:
        dest_proj = gcore.read_command('g.proj', quiet=True,
                                       flags='jf').rstrip('\n')
        if not dest_proj:
            gcore.fatal(_('g.proj failed'))
    else:
        dest_proj = options['destproj']
    gcore.debug("Getting destination projection -> '%s'" % dest_proj)

    # projection scale (+to_meter), defaulting to 1 when absent
    if not options['destscale']:
        ret = gcore.parse_command('g.proj', quiet=True, flags='j')
        if not ret:
            gcore.fatal(_('g.proj failed'))

        if '+to_meter' in ret:
            dest_scale = ret['+to_meter'].strip()
        else:
            gcore.warning(
                _("Scale (%s) not found, assuming '1'") % '+to_meter')
            dest_scale = '1'
    else:
        dest_scale = options['destscale']
    gcore.debug('Getting destination projection scale -> %s' % dest_scale)

    # set up the projections
    srs_source = {
        'proj': options['sourceproj'],
        'scale': float(options['sourcescale'])
    }
    srs_dest = {'proj': dest_proj, 'scale': float(dest_scale)}

    if options['region']:
        gcore.run_command('g.region', quiet=True, region=options['region'])
    dest_bbox = gcore.region()
    gcore.debug('Getting destination region')

    # output field separator
    fs = separator(options['separator'])

    # project the destination region into the source:
    gcore.verbose('Projecting destination region into source...')
    dest_bbox_points = bboxToPoints(dest_bbox)

    dest_bbox_source_points, errors_dest = projectPoints(dest_bbox_points,
                                                         source=srs_dest,
                                                         dest=srs_source)

    if len(dest_bbox_source_points) == 0:
        gcore.fatal(
            _("There are no tiles available. Probably the output "
              "projection system it is not compatible with the "
              "projection of the current location"))

    source_bbox = pointsToBbox(dest_bbox_source_points)

    gcore.verbose('Projecting source bounding box into destination...')

    source_bbox_points = bboxToPoints(source_bbox)

    source_bbox_dest_points, errors_source = projectPoints(source_bbox_points,
                                                           source=srs_source,
                                                           dest=srs_dest)

    # cells per map unit in the destination region
    x_metric = 1 / dest_bbox['ewres']
    y_metric = 1 / dest_bbox['nsres']

    gcore.verbose('Computing length of sides of source bounding box...')

    source_bbox_dest_lengths = sideLengths(source_bbox_dest_points, x_metric,
                                           y_metric)

    # Find the skewedness of the two directions.
    # Define it to be greater than one
    # In the direction (x or y) in which the world is least skewed (ie north south in lat long)
    # Divide the world into strips. These strips are as big as possible constrained by max_cols/max_rows
    # In the other direction do the same thing.
    # There's some recomputation of the size of the world that's got to come in
    # here somewhere.

    # For now, however, we are going to go ahead and request more data than is necessary.
    # For small regions far from the critical areas of projections this makes very little difference
    # in the amount of data gotten.
    # We can make this efficient for big regions or regions near critical
    # points later.

    # worst-case (longest) side in each direction, in destination cells
    bigger = []
    bigger.append(max(source_bbox_dest_lengths['x']))
    bigger.append(max(source_bbox_dest_lengths['y']))
    maxdim = (max_cols, max_rows)

    # Compute the number and size of tiles to use in each direction
    # I'm making fairly even sized tiles
    # They differ from each other in height and width only by one cell
    # I'm going to make the numbers all simpler and add this extra cell to
    # every tile.

    gcore.message(_('Computing tiling...'))
    tiles = [-1, -1]
    tile_base_size = [-1, -1]
    tiles_extra_1 = [-1, -1]
    tile_size = [-1, -1]
    tileset_size = [-1, -1]
    tile_size_overlap = [-1, -1]
    for i in range(len(bigger)):
        # make these into integers.
        # round up
        bigger[i] = int(bigger[i] + 1)
        tiles[i] = int((bigger[i] / maxdim[i]) + 1)
        tile_size[i] = tile_base_size[i] = int(bigger[i] / tiles[i])
        tiles_extra_1[i] = int(bigger[i] % tiles[i])
        # This is adding the extra pixel (remainder) to all of the tiles:
        if tiles_extra_1[i] > 0:
            tile_size[i] = tile_base_size[i] + 1
        tileset_size[i] = int(tile_size[i] * tiles[i])
        # Add overlap to tiles (doesn't affect tileset_size)
        tile_size_overlap[i] = tile_size[i] + int(options['overlap'])

    gcore.verbose("There will be %d by %d tiles each %d by %d cells" %
                  (tiles[0], tiles[1], tile_size[0], tile_size[1]))

    ximax = tiles[0]
    yimax = tiles[1]

    min_x = source_bbox['w']
    min_y = source_bbox['s']
    max_x = source_bbox['e']
    max_y = source_bbox['n']
    span_x = (max_x - min_x)
    span_y = (max_y - min_y)

    xi = 0
    tile_bbox = {'w': -1, 's': -1, 'e': -1, 'n': -1}

    if errors_dest > 0:
        gcore.warning(
            _("During computation %i tiles could not be created" %
              errors_dest))

    # walk the tile grid column by column, row by row
    while xi < ximax:
        tile_bbox['w'] = float(min_x) + (float(xi) * float(
            tile_size[0]) / float(tileset_size[0])) * float(span_x)
        tile_bbox['e'] = float(min_x) + (float(xi + 1) * float(
            tile_size_overlap[0]) / float(tileset_size[0])) * float(span_x)
        yi = 0
        while yi < yimax:
            tile_bbox['s'] = float(min_y) + (float(yi) * float(
                tile_size[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox['n'] = float(min_y) + (float(yi + 1) * float(
                tile_size_overlap[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox_points = bboxToPoints(tile_bbox)
            tile_dest_bbox_points, errors = projectPoints(tile_bbox_points,
                                                          source=srs_source,
                                                          dest=srs_dest)
            tile_dest_bbox = pointsToBbox(tile_dest_bbox_points)
            # emit only tiles that actually overlap the destination region,
            # in WMS query, shell, or plain separated format
            if bboxesIntersect(tile_dest_bbox, dest_bbox):
                if flags['w']:
                    print("bbox=%s,%s,%s,%s&width=%s&height=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                elif flags['g']:
                    print("w=%s;s=%s;e=%s;n=%s;cols=%s;rows=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                else:
                    print("%s%s%s%s%s%s%s%s%s%s%s" %
                          (tile_bbox['w'], fs, tile_bbox['s'], fs,
                           tile_bbox['e'], fs, tile_bbox['n'], fs,
                           tile_size_overlap[0], fs, tile_size_overlap[1]))
            yi += 1
        xi += 1
Exemple #30
0
def main():
    """Pack a raster map (plus any virtual-map components) into a .pack archive.

    Module state comes from the global ``options``/``flags`` dicts filled
    in by the GRASS parser.  All raster support elements and the location's
    projection files are copied into a temporary directory, tarred
    (optionally gzipped), and moved to the requested output path.
    """
    infile = options['input']
    compression_off = flags['c']
    mapset = None
    # Accept fully-qualified map names ("name@mapset").
    if '@' in infile:
        infile, mapset = infile.split('@')

    # Default output name: <input>.pack in the current directory.
    if options['output']:
        outfile_path, outfile_base = os.path.split(
            os.path.abspath(options['output']))
    else:
        outfile_path, outfile_base = os.path.split(
            os.path.abspath(infile + ".pack"))

    outfile = os.path.join(outfile_path, outfile_base)

    # 'tmp' is global so the module's cleanup handler can remove it.
    global tmp
    tmp = grass.tempdir()
    tmp_dir = os.path.join(tmp, infile)
    os.mkdir(tmp_dir)
    grass.debug('tmp_dir = %s' % tmp_dir)

    gfile = grass.find_file(name=infile, element='cell', mapset=mapset)
    if not gfile['name']:
        grass.fatal(_("Raster map <%s> not found") % infile)

    # Honour --overwrite via the environment; otherwise refuse to clobber.
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(
                _("Pack file <%s> already exists and will be overwritten") %
                outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <output>: <%s> exists.") % outfile)

    grass.message(_("Packing <%s> to <%s>...") % (gfile['fullname'], outfile))
    # Strip the trailing "<element>/<name>" components to get the mapset dir.
    basedir = os.path.sep.join(
        os.path.normpath(gfile['file']).split(os.path.sep)[:-2])
    olddir = os.getcwd()

    # copy elements
    info = grass.parse_command('r.info', flags='e', map=infile)
    vrt_files = {}
    # Virtual rasters list their underlying maps in cell_misc/<name>/vrt;
    # those maps must be packed too or the archive is unusable.
    if info['maptype'] == 'virtual':
        map_file = grass.find_file(
            name=infile,
            element='cell_misc',
        )
        if map_file['file']:
            vrt = os.path.join(map_file['file'], 'vrt')
            if os.path.exists(vrt):
                with open(vrt, 'r') as f:
                    for r in f.readlines():
                        # NOTE(review): 'r' keeps its trailing newline, so
                        # 'mapset' (which this clobbers) may contain '\n';
                        # it is not used afterwards — confirm intentional.
                        map, mapset = r.split('@')
                        map_basedir = os.path.sep.join(
                            os.path.normpath(map_file['file'], ).split(
                                os.path.sep)[:-2], )
                        vrt_files[map] = map_basedir

    for element in [
            'cats',
            'cell',
            'cellhd',
            'cell_misc',
            'colr',
            'fcell',
            'hist',
    ]:
        path = os.path.join(basedir, element, infile)
        if os.path.exists(path):
            grass.debug('copying %s' % path)
            # Support elements may be single files or directories.
            if os.path.isfile(path):
                shutil.copyfile(
                    path,
                    os.path.join(tmp_dir, element),
                )
            else:
                shutil.copytree(
                    path,
                    os.path.join(tmp_dir, element),
                )

        # Copy vrt files
        if vrt_files:
            for f in vrt_files.keys():
                f_tmp_dir = os.path.join(tmp, f)
                if not os.path.exists(f_tmp_dir):
                    os.mkdir(f_tmp_dir)
                path = os.path.join(vrt_files[f], element, f)
                if os.path.exists(path):
                    grass.debug("copying vrt file {}".format(path))
                    if os.path.isfile(path):
                        shutil.copyfile(
                            path,
                            os.path.join(f_tmp_dir, element),
                        )
                    else:
                        shutil.copytree(
                            path,
                            os.path.join(f_tmp_dir, element),
                        )

    if not os.listdir(tmp_dir):
        grass.fatal(_("No raster map components found"))

    # copy projection info
    # (would prefer to use g.proj*, but this way is 5.3 and 5.7 compat)
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS', 'EPSG']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            shutil.copyfile(path, os.path.join(tmp_dir, 'PROJ_' + support))

    # pack it all up
    # Archive paths are relative to tmp, so chdir there first.
    os.chdir(tmp)
    if compression_off:
        tar = tarfile.TarFile.open(name=outfile_base, mode='w:')
    else:
        tar = tarfile.TarFile.open(name=outfile_base, mode='w:gz')
    tar.add(infile, recursive=True)
    if vrt_files:
        for f in vrt_files.keys():
            tar.add(f, recursive=True)

    tar.close()
    try:
        # Move (not copy) so the temp copy is not left behind.
        shutil.move(outfile_base, outfile)
    except shutil.Error as e:
        grass.fatal(e)

    os.chdir(olddir)

    grass.verbose(_("Raster map saved to '%s'" % outfile))
Exemple #31
0
def main():
    """Convert coordinates between projections by piping them through cs2cs.

    Coordinates come from <coordinates>, a file, or stdin; results go to
    stdout or <output>.  Module state comes from the global ``options``
    and ``flags`` dicts filled in by the GRASS parser.

    Fix over the previous revision: uses ``open()`` instead of the
    Python-2-only ``file()`` builtin (NameError under Python 3).
    """
    coords = options['coordinates']
    input = options['input']
    output = options['output']
    fs = options['separator']
    proj_in = options['proj_in']
    proj_out = options['proj_out']
    ll_in = flags['i']
    ll_out = flags['o']
    decimal = flags['d']
    copy_input = flags['e']
    include_header = flags['c']

    # cs2cs (PROJ) does the actual transformation work
    if not gcore.find_program('cs2cs'):
        gcore.fatal(
            _("cs2cs program not found, install PROJ.4 first: \
            http://proj.maptools.org"))

    # the projection on each side may be given only one way
    if proj_in and ll_in:
        gcore.fatal(_("Choose only one input parameter method"))

    if proj_out and ll_out:
        gcore.fatal(_("Choose only one output parameter method"))

    if ll_in and ll_out:
        gcore.fatal(_("Choose only one auto-projection parameter method"))

    if output and not gcore.overwrite() and os.path.exists(output):
        gcore.fatal(_("Output file already exists"))

    if not coords and not input:
        gcore.fatal(_("One of <coordinates> and <input> must be given"))
    if coords and input:
        gcore.fatal(
            _("Options <coordinates> and <input> are mutually exclusive"))

    # parse field separator: "in,out" selects distinct input/output separators
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ',':
        ifs = ofs = ','
    else:
        try:
            ifs, ofs = fs.split(',')
        except ValueError:
            ifs = ofs = fs

    ifs = separator(ifs)
    ofs = separator(ofs)

    # set up projection params
    s = gcore.read_command("g.proj", flags='j')
    kv = parse_key_val(s)
    if "XY location" in kv['+proj'] and (ll_in or ll_out):
        gcore.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = gcore.read_command('g.proj', flags='jf')

    if proj_in:
        # require at least one "+key" token so obvious garbage is rejected
        if '+' in proj_in:
            in_proj = proj_in
        else:
            gcore.fatal(_("Invalid PROJ.4 input specification"))

    if not in_proj:
        gcore.verbose("Assuming current location as input")
        in_proj = gcore.read_command('g.proj', flags='jf')

    in_proj = in_proj.strip()
    gcore.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
        out_proj = gcore.read_command('g.proj', flags='jf')

    if proj_out:
        if '+' in proj_out:
            out_proj = proj_out
        else:
            gcore.fatal(_("Invalid PROJ.4 output specification"))

    if not out_proj:
        gcore.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    gcore.verbose("Output parameters: '%s'" % out_proj)

    # set up input file
    if coords:
        # a single x,y pair given on the command line: stage it in a temp file
        x, y = coords.split(',')
        tmpfile = gcore.tempfile()
        fd = open(tmpfile, "w")
        fd.write("%s%s%s\n" % (x, ifs, y))
        fd.close()
        inf = open(tmpfile)
    else:
        if input == '-':
            infile = None
            inf = sys.stdin
        else:
            infile = input
            if not os.path.exists(infile):
                gcore.fatal(_("Unable to read input data"))
            inf = open(infile)
            gcore.debug("input file=[%s]" % infile)

    # set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, 'w')
        gcore.debug("output file=[%s]" % outfile)

    # set up output style: -w5 DMS output unless decimal degrees requested
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    # do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'

    cmd = ['cs2cs'] + copyinp + outfmt + \
        in_proj.split() + ['+to'] + out_proj.split()

    p = gcore.Popen(cmd, stdin=gcore.PIPE, stdout=gcore.PIPE)

    # feed input coordinates to cs2cs from a separate thread to avoid
    # deadlocking on the pipe buffers
    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            try:
                xy, z = line.split(' ', 1)
                x, y = xy.split('\t')
            except ValueError:
                gcore.fatal(line)

            outf.write('%s%s%s%s%s\n' %
                       (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = line.split('\t')
            inX, inY = inXYZ.split(' ')[:2]
            y, z = rest.split(' ', 1)
            outf.write('%s%s%s%s%s%s%s%s%s\n' %
                       (inX.strip(), ofs, inY.strip(), ofs, x.strip(), ofs,
                        y.strip(), ofs, z.strip()))

    p.wait()

    if p.returncode != 0:
        gcore.warning(
            _("Projection transform probably failed, please investigate"))
Exemple #32
0
def main():
    """Unpack a raster map packed with r.pack into the current mapset.

    Reads the pack file from options['input'] and the optional output
    raster name from options['output']; honours GRASS_OVERWRITE and the
    -o flag (skip fatal on projection mismatch).
    """
    infile = options['input']

    # work inside a temporary directory (removed later via the global)
    global tmp_dir
    tmp_dir = grass.tempdir()
    grass.debug('tmp_dir = %s' % tmp_dir)

    if not os.path.exists(infile):
        # translate the template first, then substitute the file name
        grass.fatal(_("File <%s> not found") % infile)

    gisenv = grass.gisenv()
    mset_dir = os.path.join(gisenv['GISDBASE'],
                            gisenv['LOCATION_NAME'],
                            gisenv['MAPSET'])
    input_base = os.path.basename(infile)
    shutil.copyfile(infile, os.path.join(tmp_dir, input_base))
    os.chdir(tmp_dir)
    tar = tarfile.TarFile.open(name = input_base, mode = 'r:gz')
    try:
        # the first archive member carries the original map name
        data_name = tar.getnames()[0]
    except (IndexError, tarfile.TarError):
        grass.fatal(_("Pack file unreadable"))

    if options['output']:
        map_name = options['output']
    else:
        map_name = data_name

    # refuse to clobber an existing raster unless --overwrite is set
    gfile = grass.find_file(name = map_name, element = 'cell',
                            mapset = '.')
    overwrite = os.getenv('GRASS_OVERWRITE')
    if gfile['file'] and overwrite != '1':
        grass.fatal(_("Raster map <%s> already exists") % map_name)

    # extract data and release the archive handle
    tar.extractall()
    tar.close()
    os.chdir(data_name)

    # check projection compatibility in a rather crappy way
    if not filecmp.cmp('PROJ_INFO', os.path.join(mset_dir, '..', 'PERMANENT', 'PROJ_INFO')):
        if flags['o']:
            grass.warning(_("Projection information does not match. Proceeding..."))
        else:
            grass.fatal(_("Projection information does not match. Aborting."))

    # install each raster support element into the target mapset
    for element in ['cats', 'cell', 'cellhd', 'cell_misc', 'colr', 'fcell', 'hist']:
        if not os.path.exists(element):
            continue
        path = os.path.join(mset_dir, element)
        if not os.path.exists(path):
            os.mkdir(path)
        if element == 'cell_misc':
            # cell_misc is a per-map directory, not a single file
            path = os.path.join(mset_dir, element, map_name)
            if os.path.exists(path):
                shutil.rmtree(path)
            shutil.copytree('cell_misc', path)
        else:
            shutil.copyfile(element, os.path.join(mset_dir, element, map_name))

    grass.verbose(_("Raster map saved to <%s>") % map_name)
Exemple #33
0
def main():
    """Compute the Optimum Index Factor (OIF) for all band combinations.

    Reads the comma-separated band list from options['input'], computes
    each band's standard deviation (serially with -s, otherwise in
    parallel via r.univar) and the inter-band correlation matrix from
    r.covar, then ranks every band triple by OIF.  Output goes to
    stdout ('-' or empty) or to options['output'].
    """
    shell = flags['g']
    serial = flags['s']
    bands = options['input'].split(',')

    if len(bands) < 4:
        grass.fatal(_("At least four input maps required"))

    output = options['output']
    # calculate the Stddev for TM bands
    grass.message(_("Calculating standard deviations for all bands..."))
    stddev = {}

    if serial:
        for band in bands:
            # band is a map name (string), so %s, not %d
            grass.verbose("band %s" % band)
            s = grass.read_command('r.univar', flags='g', map=band)
            kv = parse_key_val(s)
            stddev[band] = float(kv['stddev'])
    else:
        # run all bands in parallel
        if "WORKERS" in os.environ:
            workers = int(os.environ["WORKERS"])
        else:
            workers = len(bands)
        proc = {}
        pout = {}

        # spawn jobs in the background
        n = 0
        for band in bands:
            proc[band] = grass.pipe_command('r.univar', flags='g', map=band)
            # value comparison, not identity ('is' on ints is unreliable)
            if n % workers == 0:
                # wait for the ones launched so far to finish
                for bandp in bands[:n]:
                    if not proc[bandp].stdout.closed:
                        pout[bandp] = proc[bandp].communicate()[0]
                    proc[bandp].wait()
            n = n + 1

        # wait for jobs to finish, collect the output
        for band in bands:
            if not proc[band].stdout.closed:
                pout[band] = proc[band].communicate()[0]
            proc[band].wait()

        # parse the results
        for band in bands:
            kv = parse_key_val(pout[band])
            stddev[band] = float(kv['stddev'])

    grass.message(_("Calculating Correlation Matrix..."))
    correlation = {}
    s = grass.read_command('r.covar', flags='r', map=[band for band in bands],
                           quiet=True)

    # We need to skip the first line, since r.covar prints the number of values
    lines = s.splitlines()
    for i, row in zip(bands, lines[1:]):
        for j, cell in zip(bands, row.split(' ')):
            correlation[i, j] = float(cell)

    # Calculate all combinations
    grass.message(_("Calculating OIF for all band combinations..."))

    oif = []
    for p in perms(bands):
        oif.append((oifcalc(stddev, correlation, *p), p))
    oif.sort(reverse=True)

    grass.verbose(_("The Optimum Index Factor analysis result " \
                    "(best combination shown first):"))

    if shell:
        fmt = "%s,%s,%s:%.4f\n"
    else:
        fmt = "%s, %s, %s:  %.4f\n"

    if not output or output == '-':
        for v, p in oif:
            sys.stdout.write(fmt % (p + (v,)))
    else:
        # open() instead of the Python-2-only file(); context manager
        # guarantees the handle is closed
        with open(output, 'w') as outf:
            for v, p in oif:
                outf.write(fmt % (p + (v,)))
Exemple #34
0
        sys.exit(0)

    # set command variable
    monName = sys.argv[1]
    monPath = sys.argv[2]
    monFile = {
        'map': os.path.join(monPath, 'map.ppm'),
        'cmd': os.path.join(monPath, 'cmd'),
        'env': os.path.join(monPath, 'env')
    }

    # monitor size
    monSize = (int(sys.argv[3]), int(sys.argv[4]))

    monDecor = not bool(int(sys.argv[5]))
    grass.verbose(_("Starting map display <%s>...") % (monName))

    # create pid file
    pidFile = os.path.join(monPath, "pid")
    fd = open(pidFile, 'w')
    if not fd:
        grass.fatal(_("Unable to create file <%s>") % pidFile)
    fd.write("%s\n" % os.getpid())
    fd.close()

    RunCommand('g.gisenv',
               set='MONITOR_%s_PID=%d' % (monName.upper(), os.getpid()))

    start = time.time()
    gmMap = MapApp(0)
    mapFrame = gmMap.CreateMapFrame(monName, monDecor)
def detect_compute_networks(vname, vmapset, pname, pmapset, output, order,
                            columns, threshold):
    """
    Detect the start edges and nodes, compute the stream networks
    and stream orders, reverse edges and write everything into the
    output vector map.

    :param vname: Name of the input stream vector map
    :param vmapset: Mapset name of the input stream vector map
    :param pname: Name of the input outlet points vector map
    :param pmapset: Mapset name of the input outlet points vector map
    :param output: Name of the output stream vector map
    :param order: Comma separated list of order algorithms
    :param columns: Comma separated list of column names that should be copied to the output
    :param threshold: The point search threshold to find start edges and nodes
    :return:
    """

    v = VectorTopo(name=vname, mapset=vmapset)
    p = VectorTopo(name=pname, mapset=pmapset)

    v.open(mode="r")
    p.open(mode="r")

    copy_columns = None

    # Check for copy columns only if the input vector map
    # has an attribute table
    if v.table and columns:

        # These are the column names that are newly created
        # and it must be checked whether they already exist
        # among the input map column names that should be copied.
        # Synchronize these names with the graph_to_vector() function
        new_column_names = order.split(",")
        new_column_names.append("cat")
        new_column_names.append("outlet_cat")
        new_column_names.append("network")
        new_column_names.append("reversed")
        # NOTE: the order column names were already added by
        # order.split(",") above; extending with the raw string would
        # only append its individual characters, so no extend() here

        # Check if all columns should be copied
        if columns.lower() == "all":
            columns = ",".join(v.table.columns.names())

        copy_columns = []
        for column in columns.split(","):
            # Copy only columns that exists
            if column in v.table.columns.names():
                col_index = v.table.columns.names().index(column)
                col_type = v.table.columns.types()[col_index]
                # Rename the column if it conflicts with the
                # order column names in the output map
                if column in new_column_names:

                    # Create name suffix and make sure that the new column name
                    # does not exists
                    number = 1
                    suffix = ""
                    while True:
                        suffix = "_%i" % number
                        if (column + suffix not in new_column_names
                                and column + suffix not in columns.split(",")):
                            break
                        number += 1

                    # translate the template first, then substitute
                    grass.warning(
                        _("Column name conflict: Renaming column "
                          "<%(col)s> from input map into %(col)s%(ap)s "
                          "in output map") % {
                              "col": column,
                              "ap": suffix
                          })
                    column += suffix
                copy_columns.append((col_index, column, col_type))
            else:
                v.close()
                p.close()
                grass.fatal(
                    _("Column %s is not in attribute table of <%s>") %
                    (column, vname))

    # Detect closest edges and nodes to the outflow points
    # But why nodes, aren't edges sufficient?
    #     They may be useful when detecting loops and channels
    #     in further improvements of v.stream.order.
    start_nodes = []
    start_node_ids = []
    start_edges = []
    outlet_cats = []

    for point in p:
        p_coords = point.coords()

        line = v.find_by_point.geo(point=point,
                                   maxdist=float(threshold),
                                   type="line")

        if line:
            n1, n2 = line.nodes()

            n1_coords = n1.coords()
            n2_coords = n2.coords()

            # Compute closest node to the outflow point
            dist1 = math.sqrt((p_coords[0] - n1_coords[0])**2 +
                              (p_coords[1] - n1_coords[1])**2)
            dist2 = math.sqrt((p_coords[0] - n2_coords[0])**2 +
                              (p_coords[1] - n2_coords[1])**2)

            if dist1 < dist2:
                closest_node = n1
            else:
                closest_node = n2

            grass.verbose(
                _("Detect edge <%i> for outflow point %s") %
                (line.id, point.to_wkt()))

            # Ignore identical starting points to avoid
            # redundant networks in the output
            if closest_node.id not in start_node_ids:
                start_nodes.append(closest_node)
                start_node_ids.append(closest_node.id)

            if line.id not in start_edges:
                start_edges.append(line.id)
                outlet_cats.append(point.cat)
            else:
                grass.warning(_("Ignoring duplicated start edge"))

    p.close()

    if len(start_edges) == 0:
        v.close()
        grass.fatal(_("Unable to find start edges"))

    if len(start_nodes) == 0:
        v.close()
        grass.fatal(_("Unable to find start nodes"))

    # We create a graph representation for further computations
    graphs = []

    # Traverse each network from the outflow node on
    for node in start_nodes:
        graph_nodes = {}
        graph_edges = {}
        graph_nodes[node.id] = GraphNode(node.id,
                                         [lid for lid in node.ilines()])
        traverse_network_create_graph(v, node, graph_nodes, graph_edges)
        # For now we only use the edges graph
        graphs.append(graph_edges)

    # Close the vector map, since we have our own graph representation
    v.close()

    # Set stream order types
    order_types = []

    if order.find("strahler") >= 0:
        order_types.append(ORDER_STRAHLER)
    if order.find("scheidegger") >= 0:
        order_types.append(ORDER_SCHEIDEGGER)
    if order.find("drwal") >= 0:
        order_types.append(ORDER_DRWAL)
    if order.find("horton") >= 0:
        order_types.append(ORDER_HORTON)
    if order.find("shreve") >= 0:
        order_types.append(ORDER_SHREVE)

    # Compute the stream orders (range, not the Python-2-only xrange)
    for i in range(len(start_edges)):
        edge_id = start_edges[i]
        checked_edges = []
        reversed_edges = []
        traverse_graph_create_stream_order(edge_id, graphs[i], checked_edges,
                                           reversed_edges, order_types)

    # Write the graphs as vector map
    graph_to_vector(vname, vmapset, graphs, output, order_types, outlet_cats,
                    copy_columns)
Exemple #36
0
def main():
    """Import an ASCII x,y,z point file into a 3D raster (r3.in.xyz style).

    Slices the current 3D region into horizontal depth layers, imports
    each layer with r.in.xyz (in parallel, up to 'workers' jobs), then
    assembles the layers into a 3D raster with r.to.rast3.
    """
    infile = options["input"]
    output = options["output"]
    method = options["method"]
    dtype = options["type"]
    fs = options["separator"]
    x = options["x"]
    y = options["y"]
    z = options["z"]
    value_column = options["value_column"]
    vrange = options["vrange"]
    vscale = options["vscale"]
    percent = options["percent"]
    pth = options["pth"]
    trim = options["trim"]
    workers = int(options["workers"])
    scan_only = flags["s"]
    shell_style = flags["g"]
    ignore_broken = flags["i"]

    # the WORKERS environment variable overrides the default worker count
    if workers == 1 and "WORKERS" in os.environ:
        workers = int(os.environ["WORKERS"])

    if not os.path.exists(infile):
        grass.fatal(_("Unable to read input file <%s>") % infile)

    # forward only the options the user actually set
    addl_opts = {}
    if pth:
        addl_opts["pth"] = "%s" % pth
    if trim:
        addl_opts["trim"] = "%s" % trim
    if value_column:
        addl_opts["value_column"] = "%s" % value_column
    if vrange:
        addl_opts["vrange"] = "%s" % vrange
    if vscale:
        addl_opts["vscale"] = "%s" % vscale
    if ignore_broken:
        addl_opts["flags"] = "i"

    # scan-only mode: delegate to r.in.xyz and exit
    if scan_only or shell_style:
        if shell_style:
            doShell = "g"
        else:
            doShell = ""
        grass.run_command(
            "r.in.xyz",
            flags="s" + doShell,
            input=infile,
            output="dummy",
            sep=fs,
            x=x,
            y=y,
            z=z,
            **addl_opts,
        )
        sys.exit()

    if dtype == "float":
        data_type = "FCELL"
    else:
        data_type = "DCELL"

    region = grass.region(region3d=True)

    if region["nsres"] != region["nsres3"] or region["ewres"] != region[
            "ewres3"]:
        grass.run_command("g.region", flags="3p")
        grass.fatal(
            _("The 2D and 3D region settings are different. Can not continue.")
        )

    grass.verbose(
        _("Region bottom=%.15g  top=%.15g  vertical_cell_res=%.15g  (%d depths)"
          ) % (region["b"], region["t"], region["tbres"], region["depths"]))

    grass.verbose(_("Creating slices ..."))

    # to avoid a point which falls exactly on a top bound from being
    # considered twice, we shrink the
    # For the top slice we keep it though, as someone scanning the bounds
    # may have set the bounds exactly to the data extent (a bad idea, but
    # it happens..)
    eps = 1.0e-15

    # if there are thousands of depths hopefully this array doesn't get too
    # large and so we don't have to worry much about storing/looping through
    # all the finished process infos.
    proc = {}
    pout = {}

    depths = list(range(1, 1 + region["depths"]))

    for i in depths:
        tmp_layer_name = "tmp.r3xyz.%d.%s" % (os.getpid(), "%05d" % i)

        zrange_min = region["b"] + (region["tbres"] * (i - 1))

        if i < region["depths"]:
            zrange_max = region["b"] + (region["tbres"] * i) - eps
        else:
            zrange_max = region["b"] + (region["tbres"] * i)

        # spawn depth layer import job in the background
        # grass.debug("slice %d, <%s>  %% %d" % (band, image[band], band % workers))
        grass.message(
            _("Processing horizontal slice %d of %d [%.15g,%.15g) ...") %
            (i, region["depths"], zrange_min, zrange_max))

        proc[i] = grass.start_command(
            "r.in.xyz",
            input=infile,
            output=tmp_layer_name,
            sep=fs,
            method=method,
            x=x,
            y=y,
            z=z,
            percent=percent,
            type=data_type,
            zrange="%.15g,%.15g" % (zrange_min, zrange_max),
            **addl_opts,
        )

        grass.debug("i=%d, %%=%d  (workers=%d)" % (i, i % workers, workers))
        # print sys.getsizeof(proc)  # sizeof(proc array)  [not so big]

        if i % workers == 0:
            # wait for the ones launched so far to finish
            for p_i in depths[:i]:
                pout[p_i] = proc[p_i].communicate()[0]
                if proc[p_i].wait() != 0:
                    grass.fatal(_("Trouble importing data. Aborting."))

    # wait for jobs to finish, collect any stray output
    for i in depths:
        pout[i] = proc[i].communicate()[0]
        if proc[i].wait() != 0:
            grass.fatal(_("Trouble importing data. Aborting."))

    del proc

    grass.verbose(_("Assembling 3D cube ..."))

    # input order: lower most strata first
    slices = grass.read_command("g.list",
                                type="raster",
                                sep=",",
                                pattern="tmp.r3xyz.%d.*" % os.getpid()).rstrip(
                                    os.linesep)
    grass.debug(slices)

    # FIX: the original printed the success message in the except branch,
    # so failure reported "Done." and success reported nothing
    try:
        grass.run_command("r.to.rast3", input=slices, output=output)
    except CalledModuleError:
        grass.fatal(_("Unable to assemble the 3D raster map"))

    grass.message(_("Done. 3D raster map <%s> created.") % output)
Exemple #37
0
def main():
    """Pack a native-format vector map into a (optionally gzipped) tar file.

    Reads options['input'] and optional options['output'] (defaults to
    '<input>.pack'); the -c flag disables gzip compression.  Attribute
    tables are copied into an embedded SQLite database and PROJ files
    are included so the archive can be validated on unpack.
    """
    infile = options['input']
    compression_off = flags['c']
    
    global basedir
    basedir = grass.tempdir()
    
    # check if vector map exists
    gfile = grass.find_file(infile, element = 'vector')
    if not gfile['name']:
        grass.fatal(_("Vector map <%s> not found") % infile)
    
    # check if input vector map is in the native format
    if vector.vector_info(gfile['fullname'])['format'] != 'native':
        grass.fatal(_("Unable to pack vector map <%s>. Only native format supported.") % \
                        gfile['fullname'])
    
    # strip the mapset part, if any ('name@mapset' -> 'name');
    # str.find() returns -1 (truthy!) when absent, so test membership
    if '@' in infile:
        infile = infile.split('@')[0]
    
    # output name
    if options['output']:
        outfile = options['output']
    else:
        outfile = infile + '.pack'
    
    # check if exists the output file
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(_("Pack file <%s> already exists and will be overwritten") % outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))
    
    # prepare for packing
    grass.verbose(_("Packing <%s>...") % (gfile['fullname']))
    
    # write tar file, optional compression 
    if compression_off:
        tar = tarfile.open(name = outfile, mode = 'w:')
    else:
        tar = tarfile.open(name = outfile, mode = 'w:gz')
    tar.add(gfile['file'], infile)
    
    # check if exist a db connection for the vector 
    db_vect = vector.vector_db(gfile['fullname'])
    if not db_vect:
        grass.verbose(_('There is not database connected with vector map <%s>') % gfile['fullname'])
    else:
        # for each layer connection save a table in sqlite database
        sqlitedb = os.path.join(basedir, 'db.sqlite')
        # .items() works on both Python 2 and 3 (.iteritems() is py2-only)
        for i, dbconn in db_vect.items():
            grass.run_command('db.copy', from_driver = dbconn['driver'], 
                              from_database = dbconn['database'],
                              from_table =  dbconn['table'], 
                              to_driver = 'sqlite', to_database = sqlitedb, 
                              to_table = dbconn['table'])
        tar.add(sqlitedb, 'db.sqlite')
    
    # add to the tar file the PROJ files to check when unpack file    
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            tar.add(path, 'PROJ_' + support)
    tar.close()
    
    grass.message(_("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
Exemple #38
0
def main():
    """Convert coordinates between projections via the cs2cs utility (m.proj).

    Determines input/output projection parameters from the -i/-o flags,
    proj_in/proj_out options, or the current location; streams the
    coordinates (option, file, or stdin) through cs2cs in a feeder
    thread; writes the transformed values to stdout or the output file.
    """
    coords = options["coordinates"]
    input = options["input"]
    output = options["output"]
    fs = options["separator"]
    proj_in = options["proj_in"]
    proj_out = options["proj_out"]
    ll_in = flags["i"]
    ll_out = flags["o"]
    decimal = flags["d"]
    copy_input = flags["e"]
    include_header = flags["c"]

    # check for cs2cs
    if not gcore.find_program("cs2cs"):
        gcore.fatal(
            _("cs2cs program not found, install PROJ first: \
            https://proj.org"))

    # parse field separator
    # FIXME: input_x,y needs to split on multiple whitespace between them
    if fs == ",":
        ifs = ofs = ","
    else:
        try:
            # 'in,out' form gives separate input/output separators
            ifs, ofs = fs.split(",")
        except ValueError:
            ifs = ofs = fs

    ifs = separator(ifs)
    ofs = separator(ofs)

    # set up projection params
    s = gcore.read_command("g.proj", flags="j")
    kv = parse_key_val(s)
    if "XY location" in kv["+proj"] and (ll_in or ll_out):
        gcore.fatal(_("Unable to project to or from a XY location"))

    in_proj = None

    if ll_in:
        in_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming LL WGS84 as input, current projection as output ")

    if ll_out:
        in_proj = gcore.read_command("g.proj", flags="jf")

    # explicit proj_in overrides the flag-derived value
    if proj_in:
        if "+" in proj_in:
            in_proj = proj_in
        else:
            gcore.fatal(_("Invalid PROJ.4 input specification"))

    if not in_proj:
        gcore.verbose("Assuming current location as input")
        in_proj = gcore.read_command("g.proj", flags="jf")

    in_proj = in_proj.strip()
    gcore.verbose("Input parameters: '%s'" % in_proj)

    out_proj = None

    if ll_out:
        out_proj = "+proj=longlat +datum=WGS84"
        gcore.verbose(
            "Assuming current projection as input, LL WGS84 as output ")

    if ll_in:
        out_proj = gcore.read_command("g.proj", flags="jf")

    if proj_out:
        if "+" in proj_out:
            out_proj = proj_out
        else:
            gcore.fatal(_("Invalid PROJ.4 output specification"))

    if not out_proj:
        gcore.fatal(_("Missing output projection parameters "))
    out_proj = out_proj.strip()
    gcore.verbose("Output parameters: '%s'" % out_proj)

    # set up input file
    if coords:
        # single coordinate pair given on the command line
        x, y = coords.split(",")
        tmpfile = gcore.tempfile()
        fd = open(tmpfile, "w")
        fd.write("%s%s%s\n" % (x, ifs, y))
        fd.close()
        inf = open(tmpfile)
    else:
        if input == "-":
            infile = None
            inf = sys.stdin
        else:
            infile = input
            if not os.path.exists(infile):
                gcore.fatal(_("Unable to read input data"))
            inf = open(infile)
            gcore.debug("input file=[%s]" % infile)

    # set up output file
    if not output:
        outfile = None
        outf = sys.stdout
    else:
        outfile = output
        outf = open(outfile, "w")
        gcore.debug("output file=[%s]" % outfile)

    # set up output style
    if not decimal:
        outfmt = ["-w5"]
    else:
        outfmt = ["-f", "%.8f"]
    if not copy_input:
        copyinp = []
    else:
        copyinp = ["-E"]

    # do the conversion
    # Convert cs2cs DMS format to GRASS DMS format:
    #   cs2cs | sed -e 's/d/:/g' -e "s/'/:/g"  -e 's/"//g'

    cmd = ["cs2cs"
           ] + copyinp + outfmt + in_proj.split() + ["+to"] + out_proj.split()

    p = gcore.Popen(cmd, stdin=gcore.PIPE, stdout=gcore.PIPE)

    # feeder thread pushes the input through cs2cs's stdin
    tr = TrThread(ifs, inf, p.stdin)
    tr.start()

    if not copy_input:
        if include_header:
            outf.write("x%sy%sz\n" % (ofs, ofs))
        for line in p.stdout:
            try:
                xy, z = decode(line).split(" ", 1)
                x, y = xy.split("\t")
            except ValueError:
                # decode the raw bytes before reporting, as above
                gcore.fatal(decode(line))

            outf.write("%s%s%s%s%s\n" %
                       (x.strip(), ofs, y.strip(), ofs, z.strip()))
    else:
        if include_header:
            outf.write("input_x%sinput_y%sx%sy%sz\n" % (ofs, ofs, ofs, ofs))
        for line in p.stdout:
            inXYZ, x, rest = decode(line).split("\t")
            inX, inY = inXYZ.split(" ")[:2]
            y, z = rest.split(" ", 1)
            outf.write("%s%s%s%s%s%s%s%s%s\n" % (
                inX.strip(),
                ofs,
                inY.strip(),
                ofs,
                x.strip(),
                ofs,
                y.strip(),
                ofs,
                z.strip(),
            ))

    p.wait()

    if p.returncode != 0:
        gcore.warning(
            _("Projection transform probably failed, please investigate"))
Exemple #39
0
def main():
    """Unpack a vector map packed with v.pack into the current mapset.

    Reads options['input'] and optional options['output']; the -p flag
    only prints the archive's projection info, the -o flag overrides the
    projection-compatibility check.  Attribute tables embedded in the
    archive's SQLite database are copied into the mapset's default
    database and reconnected to the map.
    """
    infile = options['input']

    # create temporary directory
    global tmp_dir
    tmp_dir = grass.tempdir()
    grass.debug('tmp_dir = %s' % tmp_dir)

    # check if the input file exists
    if not os.path.exists(infile):
        grass.fatal(_("File <%s> not found") % infile)

    # copy the files to tmp dir
    input_base = os.path.basename(infile)
    shutil.copyfile(infile, os.path.join(tmp_dir, input_base))
    os.chdir(tmp_dir)
    tar = tarfile.TarFile.open(name=input_base, mode='r')
    try:
        # first archive member carries the original map name
        data_name = tar.getnames()[0]
    except (IndexError, tarfile.TarError):
        grass.fatal(_("Pack file unreadable"))

    if flags['p']:
        # print proj info and exit
        try:
            for fname in ['PROJ_INFO', 'PROJ_UNITS']:
                f = tar.extractfile(fname)
                # tarfile yields bytes; decode before writing to stdout
                sys.stdout.write(grass.decode(f.read()))
        except KeyError:
            grass.fatal(
                _("Pack file unreadable: file '{}' missing").format(fname))
        tar.close()

        return 0

    # set the output name
    if options['output']:
        map_name = options['output']
    else:
        map_name = data_name

    # grass env
    gisenv = grass.gisenv()
    mset_dir = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            gisenv['MAPSET'])

    new_dir = os.path.join(mset_dir, 'vector', map_name)

    gfile = grass.find_file(name=map_name, element='vector', mapset='.')
    overwrite = os.getenv('GRASS_OVERWRITE')
    if gfile['file'] and overwrite != '1':
        grass.fatal(_("Vector map <%s> already exists") % map_name)
    elif overwrite == '1' and gfile['file']:
        grass.warning(
            _("Vector map <%s> already exists and will be overwritten") %
            map_name)
        grass.run_command('g.remove',
                          flags='f',
                          quiet=True,
                          type='vector',
                          name=map_name)
        shutil.rmtree(new_dir, True)

    # extract data
    tar.extractall()
    tar.close()
    if os.path.exists(os.path.join(data_name, 'coor')):
        pass
    elif os.path.exists(os.path.join(data_name, 'cell')):
        grass.fatal(
            _("This GRASS GIS pack file contains raster data. Use "
              "r.unpack to unpack <%s>" % map_name))
    else:
        grass.fatal(_("Pack file unreadable"))

    # check projection compatibility in a rather crappy way
    loc_proj = os.path.join(mset_dir, '..', 'PERMANENT', 'PROJ_INFO')
    loc_proj_units = os.path.join(mset_dir, '..', 'PERMANENT', 'PROJ_UNITS')

    skip_projection_check = False
    if not os.path.exists(os.path.join(tmp_dir, 'PROJ_INFO')):
        if os.path.exists(loc_proj):
            grass.fatal(
                _("PROJ_INFO file is missing, unpack vector map in XY (unprojected) location."
                  ))
        skip_projection_check = True  # XY location

    if not skip_projection_check:
        diff_result_1 = diff_result_2 = None
        if not grass.compare_key_value_text_files(filename_a=os.path.join(
                tmp_dir, 'PROJ_INFO'),
                                                  filename_b=loc_proj,
                                                  proj=True):
            diff_result_1 = diff_files(os.path.join(tmp_dir, 'PROJ_INFO'),
                                       loc_proj)

        if not grass.compare_key_value_text_files(filename_a=os.path.join(
                tmp_dir, 'PROJ_UNITS'),
                                                  filename_b=loc_proj_units,
                                                  units=True):
            diff_result_2 = diff_files(os.path.join(tmp_dir, 'PROJ_UNITS'),
                                       loc_proj_units)

        if diff_result_1 or diff_result_2:
            if flags['o']:
                grass.warning(
                    _("Projection information does not match. Proceeding..."))
            else:
                if diff_result_1:
                    grass.warning(
                        _("Difference between PROJ_INFO file of packed map "
                          "and of current location:\n{diff}").format(
                              diff=''.join(diff_result_1)))
                if diff_result_2:
                    grass.warning(
                        _("Difference between PROJ_UNITS file of packed map "
                          "and of current location:\n{diff}").format(
                              diff=''.join(diff_result_2)))
                grass.fatal(
                    _("Projection of dataset does not appear to match current location."
                      " In case of no significant differences in the projection definitions,"
                      " use the -o flag to ignore them and use"
                      " current location definition."))

    # new db
    fromdb = os.path.join(tmp_dir, 'db.sqlite')
    # copy file
    shutil.copytree(data_name, new_dir)
    # exist fromdb
    if os.path.exists(fromdb):
        # the db connection in the output mapset
        dbconn = grassdb.db_connection(force=True)
        todb = dbconn['database']
        # return all tables
        list_fromtable = grass.read_command('db.tables',
                                            driver='sqlite',
                                            database=fromdb).splitlines()

        # return the list of old connection for extract layer number and key
        dbln = open(os.path.join(new_dir, 'dbln'), 'r')
        dbnlist = dbln.readlines()
        dbln.close()
        # check if dbf or sqlite directory exists
        if dbconn['driver'] == 'dbf' and not os.path.exists(
                os.path.join(mset_dir, 'dbf')):
            os.mkdir(os.path.join(mset_dir, 'dbf'))
        elif dbconn['driver'] == 'sqlite' and not os.path.exists(
                os.path.join(mset_dir, 'sqlite')):
            os.mkdir(os.path.join(mset_dir, 'sqlite'))
        # for each old connection
        for t in dbnlist:
            # it split the line of each connection, to found layer number and key
            if len(t.split('|')) != 1:
                values = t.split('|')
            else:
                values = t.split(' ')

            from_table = values[1]
            layer = values[0].split('/')[0]
            # we need to take care about the table name in case of several layer
            if options["output"]:
                if len(dbnlist) > 1:
                    to_table = "%s_%s" % (map_name, layer)
                else:
                    to_table = map_name
            else:
                to_table = from_table

            grass.verbose(
                _("Copying table <%s> as table <%s>") % (from_table, to_table))

            # copy the table in the default database
            try:
                grass.run_command('db.copy',
                                  to_driver=dbconn['driver'],
                                  to_database=todb,
                                  to_table=to_table,
                                  from_driver='sqlite',
                                  from_database=fromdb,
                                  from_table=from_table)
            except CalledModuleError:
                grass.fatal(
                    _("Unable to copy table <%s> as table <%s>") %
                    (from_table, to_table))

            grass.verbose(
                _("Connect table <%s> to vector map <%s> at layer <%s>") %
                (to_table, map_name, layer))

            # and connect the new tables with the right layer
            try:
                grass.run_command('v.db.connect',
                                  flags='o',
                                  quiet=True,
                                  driver=dbconn['driver'],
                                  database=todb,
                                  map=map_name,
                                  key=values[2],
                                  layer=layer,
                                  table=to_table)
            except CalledModuleError:
                grass.fatal(
                    _("Unable to connect table <%s> to vector map <%s>") %
                    (to_table, map_name))

    grass.message(_("Vector map <%s> successfully unpacked") % map_name)
Exemple #40
0
    def GetTiles(self):
        """Split the current region into WMS request tiles.

        Runs r.tileset to compute the tiling, optionally clears the tile
        download directory (-c flag), then writes one world file per tile
        and appends one download-request line per tile to
        ``self.request_file``.

        Calls grass.fatal() if r.tileset produces no output.
        """
        grass.message(_("Calculating tiles..."))
        tiles = grass.read_command('r.tileset',
                                   quiet=True,
                                   flags='g',
                                   sourceproj=self.proj_srs,
                                   sourcescale=self.srs_scale,
                                   overlap=2,
                                   maxcols=int(self.options['maxcols']),
                                   maxrows=int(self.options['maxrows']),
                                   **self.tileset_options)
        if not tiles:
            grass.fatal(_("r.tileset failed"))
        tiles = tiles.splitlines()

        if self.flags['c']:
            # remove leftovers of a previous run from the tile directory
            rmfiles = os.path.join(self._tdir, '*')
            grass.verbose("Removing files '%s'" % rmfiles)
            # NOTE: 'path' instead of 'file' -- the original shadowed the
            # builtin name
            for path in glob.glob(rmfiles):
                if os.path.isdir(path):
                    os.rmdir(path)
                else:
                    os.remove(path)

        # 'with' guarantees the request file is closed even if a tile
        # entry raises (the original leaked the handle on error)
        with open(self.request_file, 'w') as rf:
            for i, tile in enumerate(tiles):
                outputfile = os.path.join(self._tdir,
                                          str(i) + self.file_extent)
                worldfile = os.path.join(self._tdir,
                                         str(i) + self.worldfile)
                dtile = grass.parse_key_val(tile, vsep=';')
                n = float(dtile['n'])
                self.data['n'] = n
                s = float(dtile['s'])
                self.data['s'] = s
                e = float(dtile['e'])
                self.data['e'] = e
                w = float(dtile['w'])
                self.data['w'] = w
                nr = int(dtile['rows'])
                nc = int(dtile['cols'])
                self.data['width'] = nc
                self.data['height'] = nr

                size = "bbox=%f,%f,%f,%f&width=%d&height=%d" % \
                    (w, s, e, n, nc, nr)
                xres = (e - w) / nc
                # yres is negative on purpose: world-file rows grow southwards
                yres = (s - n) / nr
                # center of the top-left cell
                top_left_cell_center_x = w + xres / 2
                top_left_cell_center_y = n + yres / 2

                # write the world file for this tile
                with open(worldfile, 'w') as wf:
                    wf.write("%f\n0.0\n0.0\n%f\n%f\n%f\n" %
                             (xres, yres, top_left_cell_center_x,
                              top_left_cell_center_y))

                # WMS GetMap request string for this tile
                string = "service=WMS&request=GetMap&layers=%s&styles=%s&srs=%s&%s&format=%s&%s&%s" % \
                    (self.options['layers'], self.options['styles'],
                     self.options['srs'], size, self.format,
                     self.transparency, self.options['wmsquery'])
                rf.write('output=%s;server=%s;string=%s\n' %
                         (outputfile, self.options['mapserver'], string))

        grass.message(_("Done: requesting %d tiles") % len(tiles))
        if len(tiles) > 200:
            # BUG FIX: the original warning was indented with tabs while the
            # rest of the method uses spaces -- a TabError under Python 3;
            # re-indented with spaces and the string continuation cleaned up.
            grass.warning("Proceed with care. This number of tiles may "
                          "exceed the maximum command line arguments "
                          "available from the operating system later on in "
                          "the r.in.gdalwarp step. In addition it may be "
                          "considered abusive behavior by the server "
                          "providers - check their terms of use.")
Exemple #41
0
def main():
    """Pack a native GRASS vector map into a tar archive (v.pack).

    Verifies the input map exists and is in the native format, archives the
    map directory, dumps each connected attribute table into a bundled
    SQLite database (db.sqlite) and adds the location's PROJ files so
    v.unpack can check projection compatibility at unpack time.

    The archive is gzip-compressed unless the -c flag is given.
    """
    infile = options["input"]
    compression_off = flags["c"]

    global basedir
    basedir = grass.tempdir()

    # check if vector map exists
    gfile = grass.find_file(infile, element="vector")
    if not gfile["name"]:
        grass.fatal(_("Vector map <%s> not found") % infile)

    # check if input vector map is in the native format
    if vector.vector_info(gfile["fullname"])["format"] != "native":
        grass.fatal(
            _("Unable to pack vector map <%s>. Only native format supported.")
            % gfile["fullname"]
        )

    # strip the mapset part of a fully qualified name (name@mapset).
    # BUG FIX: str.find() returns -1 (truthy) when "@" is absent, so the
    # original `if infile.find("@")` took this branch for every name;
    # test membership explicitly instead (result is the same for names
    # without "@" since split returns the whole string, but the intent is
    # now correct).
    if "@" in infile:
        infile = infile.split("@")[0]

    # output name defaults to <input>.pack
    if options["output"]:
        outfile = options["output"]
    else:
        outfile = infile + ".pack"

    # refuse to clobber an existing pack file unless overwrite is enabled
    if os.path.exists(outfile):
        if os.getenv("GRASS_OVERWRITE"):
            grass.warning(
                _("Pack file <%s> already exists and will be overwritten") % outfile
            )
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))

    # prepare for packing
    grass.verbose(_("Packing <%s>...") % (gfile["fullname"]))

    # write tar file, optional compression
    if compression_off:
        tar = tarfile.open(name=outfile, mode="w:")
    else:
        tar = tarfile.open(name=outfile, mode="w:gz")
    tar.add(gfile["file"], infile)

    # check if a db connection exists for the vector map
    db_vect = vector.vector_db(gfile["fullname"])
    if not db_vect:
        grass.verbose(
            _("There is not database connected with vector map <%s>")
            % gfile["fullname"]
        )
    else:
        # for each layer connection, dump the table into one shared sqlite db
        sqlitedb = os.path.join(basedir, "db.sqlite")
        for _layer, dbconn in db_vect.items():
            grass.run_command(
                "db.copy",
                from_driver=dbconn["driver"],
                from_database=dbconn["database"],
                from_table=dbconn["table"],
                to_driver="sqlite",
                to_database=sqlitedb,
                to_table=dbconn["table"],
            )
        tar.add(sqlitedb, "db.sqlite")

    # bundle the PROJ files so v.unpack can verify the projection
    gisenv = grass.gisenv()
    for support in ["INFO", "UNITS", "EPSG"]:
        path = os.path.join(
            gisenv["GISDBASE"], gisenv["LOCATION_NAME"], "PERMANENT", "PROJ_" + support
        )
        if os.path.exists(path):
            tar.add(path, "PROJ_" + support)
    tar.close()

    grass.message(_("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
Exemple #42
0
def main():
    """Unpack a vector map created by v.pack into the current mapset.

    Extracts the tar archive into a temporary directory, verifies the
    packed map's projection against the current location (unless the -o
    flag overrides the check), copies the map directory into the mapset
    and restores attribute tables from the bundled SQLite database,
    reconnecting them layer by layer.
    """
    infile = options['input']

    # create temporary working directory (removed by the module's cleanup)
    global tmp_dir
    tmp_dir = grass.tempdir()
    grass.debug('tmp_dir = %s' % tmp_dir)

    # check if the input file exists
    if not os.path.exists(infile):
        grass.fatal(_("File <%s> not found") % infile)

    # copy the pack file to the tmp dir and work from there
    input_base = os.path.basename(infile)
    shutil.copyfile(infile, os.path.join(tmp_dir, input_base))
    os.chdir(tmp_dir)
    tar = tarfile.TarFile.open(name=input_base, mode='r')
    try:
        # the first archive member is the packed map's directory name
        data_name = tar.getnames()[0]
    except:
        grass.fatal(_("Pack file unreadable"))

    # output map name defaults to the packed map's name
    if options['output']:
        map_name = options['output']
    else:
        map_name = data_name

    # current GRASS environment: target mapset directory
    gisenv = grass.gisenv()
    mset_dir = os.path.join(gisenv['GISDBASE'],
                            gisenv['LOCATION_NAME'],
                            gisenv['MAPSET'])

    # destination directory of the unpacked vector map
    new_dir = os.path.join(mset_dir, 'vector', map_name)

    # refuse to clobber an existing map unless GRASS_OVERWRITE=1
    gfile = grass.find_file(name=map_name, element='vector', mapset='.')
    overwrite = os.getenv('GRASS_OVERWRITE')
    if gfile['file'] and overwrite != '1':
        grass.fatal(_("Vector map <%s> already exists") % map_name)
    elif overwrite == '1' and gfile['file']:
        grass.warning(_("Vector map <%s> already exists and will be overwritten") % map_name)
        grass.run_command('g.remove', flags='f', quiet=True, type='vector',
                          name=map_name)
        shutil.rmtree(new_dir, True)

    # extract data; a native vector map has a 'coor' file, a raster pack
    # would have 'cell' instead
    tar.extractall()
    if os.path.exists(os.path.join(data_name, 'coor')):
        pass
    elif os.path.exists(os.path.join(data_name, 'cell')):
        grass.fatal(_("This GRASS GIS pack file contains raster data. Use "
                      "r.unpack to unpack <%s>" % map_name))
    else:
        grass.fatal(_("Pack file unreadable"))

    # check projection compatibility by comparing the bundled PROJ files
    # against the ones of the current location
    loc_proj = os.path.join(mset_dir, '..', 'PERMANENT', 'PROJ_INFO')
    loc_proj_units = os.path.join(mset_dir, '..', 'PERMANENT', 'PROJ_UNITS')

    skip_projection_check = False
    if not os.path.exists(os.path.join(tmp_dir, 'PROJ_INFO')):
        # no PROJ_INFO in the pack: only acceptable in an XY location
        if os.path.exists(loc_proj):
            grass.fatal(
                _("PROJ_INFO file is missing, unpack vector map in XY (unprojected) location."))
        skip_projection_check = True  # XY location

    if not skip_projection_check:
        diff_result_1 = diff_result_2 = None
        if not grass.compare_key_value_text_files(filename_a=os.path.join(tmp_dir, 'PROJ_INFO'),
                                                  filename_b=loc_proj, proj=True):
            diff_result_1 = diff_files(os.path.join(tmp_dir, 'PROJ_INFO'),
                                       loc_proj)

        if not grass.compare_key_value_text_files(filename_a=os.path.join(tmp_dir, 'PROJ_UNITS'),
                                                  filename_b=loc_proj_units,
                                                  units=True):
            diff_result_2 = diff_files(os.path.join(tmp_dir, 'PROJ_UNITS'),
                                       loc_proj_units)

        if diff_result_1 or diff_result_2:
            if flags['o']:
                # -o flag: proceed despite the mismatch
                grass.warning(_("Projection information does not match. Proceeding..."))
            else:
                if diff_result_1:
                    grass.warning(_("Difference between PROJ_INFO file of packed map "
                                    "and of current location:\n{diff}").format(diff=''.join(diff_result_1)))
                if diff_result_2:
                    grass.warning(_("Difference between PROJ_UNITS file of packed map "
                                    "and of current location:\n{diff}").format(diff=''.join(diff_result_2)))
                grass.fatal(_("Projection of dataset does not appear to match current location."
                              " In case of no significant differences in the projection definitions,"
                              " use the -o flag to ignore them and use"
                              " current location definition."))

    # bundled SQLite database with the attribute tables (may be absent)
    fromdb = os.path.join(tmp_dir, 'db.sqlite')
    # copy the extracted map directory into the mapset
    shutil.copytree(data_name, new_dir)
    # restore attribute tables only if the pack contains a database
    if os.path.exists(fromdb):
        # the db connection in the output mapset
        dbconn = grassdb.db_connection(force=True)
        todb = dbconn['database']
        # NOTE(review): list_fromtable is computed but never used below
        list_fromtable = grass.read_command('db.tables', driver='sqlite',
                                            database=fromdb).splitlines()

        # the old 'dbln' file lists one connection per layer; we parse it
        # to recover layer numbers and key columns
        dbln = open(os.path.join(new_dir, 'dbln'), 'r')
        dbnlist = dbln.readlines()
        dbln.close()
        # create the driver-specific db directory in the mapset if missing
        if dbconn['driver'] == 'dbf' and not os.path.exists(os.path.join(mset_dir, 'dbf')):
            os.mkdir(os.path.join(mset_dir, 'dbf'))
        elif dbconn['driver'] == 'sqlite' and not os.path.exists(os.path.join(mset_dir, 'sqlite')):
            os.mkdir(os.path.join(mset_dir, 'sqlite'))
        # for each old connection
        for t in dbnlist:
            # split each connection line to find layer number, table and key;
            # fields are separated by '|' or (older format) by spaces
            if len(t.split('|')) != 1:
                values = t.split('|')
            else:
                values = t.split(' ')

            # values[0] is '<layer>[/<name>]', values[1] the table name,
            # values[2] the key column -- TODO confirm against dbln format docs
            from_table = values[1]
            layer = values[0].split('/')[0]
            # when renaming the map, suffix table names with the layer number
            # if there is more than one layer
            if options["output"]:
                if len(dbnlist) > 1:
                    to_table = "%s_%s" % (map_name, layer)
                else:
                    to_table = map_name
            else:
                to_table = from_table

            grass.verbose(_("Coping table <%s> as table <%s>") % (from_table,
                                                                  to_table))

            # copy the table from the bundled sqlite db into the mapset db
            try:
                grass.run_command('db.copy', to_driver=dbconn['driver'],
                                  to_database=todb, to_table=to_table,
                                  from_driver='sqlite',
                                  from_database=fromdb,
                                  from_table=from_table)
            except CalledModuleError:
                grass.fatal(_("Unable to copy table <%s> as table <%s>") % (from_table, to_table))

            grass.verbose(_("Connect table <%s> to vector map <%s> at layer <%s>") %
                           (to_table, map_name, layer))

            # connect the restored table to the map at the right layer
            try:
                grass.run_command('v.db.connect', flags='o', quiet=True,
                                  driver=dbconn['driver'], database=todb,
                                  map=map_name, key=values[2],
                                  layer=layer, table=to_table)
            except CalledModuleError:
                grass.fatal(_("Unable to connect table <%s> to vector map <%s>") %
                             (to_table, map_name))

    grass.message(_("Vector map <%s> successfully unpacked") % map_name)
Exemple #43
0
def main():
    """Compute a tiling of the current region in a source projection
    (r.tileset).

    Projects the current region's bounding box into the source projection,
    sizes tiles so that each (plus overlap) fits within maxcols x maxrows,
    then prints every tile whose destination-projected bounding box
    intersects the region -- as WMS query strings (-w), shell-style
    key=value lines (-g), or separator-delimited fields (default).
    """
    # Account for the overlap pixels that will be added to each tile
    max_cols = int(options['maxcols']) - int(options['overlap'])
    max_rows = int(options['maxrows']) - int(options['overlap'])

    if max_cols == 0:
        gcore.fatal(_("It is not possibile to set 'maxcols=%s' and "
                      "'overlap=%s'. Please set maxcols>overlap" %
                      (options['maxcols'], options['overlap'])))
    elif max_rows == 0:
        gcore.fatal(_("It is not possibile to set 'maxrows=%s' and "
                      "'overlap=%s'. Please set maxrows>overlap" %
                      (options['maxrows'], options['overlap'])))
    # destination projection: from option, or the current location (g.proj)
    if not options['destproj']:
        dest_proj = gcore.read_command('g.proj',
                                       quiet=True,
                                       flags='jf').rstrip('\n')
        if not dest_proj:
            gcore.fatal(_('g.proj failed'))
    else:
        dest_proj = options['destproj']
    gcore.debug("Getting destination projection -> '%s'" % dest_proj)

    # projection scale: from option, or the location's +to_meter factor
    if not options['destscale']:
        ret = gcore.parse_command('g.proj',
                                  quiet=True,
                                  flags='j')
        if not ret:
            gcore.fatal(_('g.proj failed'))

        if '+to_meter' in ret:
            dest_scale = ret['+to_meter'].strip()
        else:
            gcore.warning(
                _("Scale (%s) not found, assuming '1'") %
                '+to_meter')
            dest_scale = '1'
    else:
        dest_scale = options['destscale']
    gcore.debug('Getting destination projection scale -> %s' % dest_scale)

    # set up the source and destination projection descriptors
    srs_source = {'proj': options['sourceproj'],
                  'scale': float(options['sourcescale'])}
    srs_dest = {'proj': dest_proj, 'scale': float(dest_scale)}

    # optionally switch to a named region before reading its extent
    if options['region']:
        gcore.run_command('g.region',
                          quiet=True,
                          region=options['region'])
    dest_bbox = gcore.region()
    gcore.debug('Getting destination region')

    # output field separator
    fs = separator(options['separator'])

    # project the destination region into the source:
    gcore.verbose('Projecting destination region into source...')
    dest_bbox_points = bboxToPoints(dest_bbox)

    dest_bbox_source_points, errors_dest = projectPoints(dest_bbox_points,
                                                         source=srs_dest,
                                                         dest=srs_source)

    if len(dest_bbox_source_points) == 0:
        gcore.fatal(_("There are no tiles available. Probably the output "
                      "projection system it is not compatible with the "
                      "projection of the current location"))

    source_bbox = pointsToBbox(dest_bbox_source_points)

    gcore.verbose('Projecting source bounding box into destination...')

    source_bbox_points = bboxToPoints(source_bbox)

    source_bbox_dest_points, errors_source = projectPoints(source_bbox_points,
                                                           source=srs_source,
                                                           dest=srs_dest)

    # cells per destination map unit, used to measure side lengths in cells
    x_metric = 1 / dest_bbox['ewres']
    y_metric = 1 / dest_bbox['nsres']

    gcore.verbose('Computing length of sides of source bounding box...')

    source_bbox_dest_lengths = sideLengths(source_bbox_dest_points,
                                           x_metric, y_metric)

    # Find the skewedness of the two directions.
    # Define it to be greater than one
    # In the direction (x or y) in which the world is least skewed (ie north south in lat long)
    # Divide the world into strips. These strips are as big as possible, constrained by max_
    # In the other direction do the same thing.
    # There is some recomputation of the size of the world that's got to come in
    # here somewhere.

    # For now, however, we are going to go ahead and request more data than is necessary.
    # For small regions far from the critical areas of projections this makes very little difference
    # in the amount of data gotten.
    # We can make this efficient for big regions or regions near critical
    # points later.

    # longest side of the source bbox (in destination cells) per direction
    bigger = []
    bigger.append(max(source_bbox_dest_lengths['x']))
    bigger.append(max(source_bbox_dest_lengths['y']))
    maxdim = (max_cols, max_rows)

    # Compute the number and size of tiles to use in each direction
    # I'm making fairly even sized tiles
    # They differ from each other in height and width only by one cell
    # I'm going to make the numbers all simpler and add this extra cell to
    # every tile.

    gcore.message(_('Computing tiling...'))
    tiles = [-1, -1]
    tile_base_size = [-1, -1]
    tiles_extra_1 = [-1, -1]
    tile_size = [-1, -1]
    tileset_size = [-1, -1]
    tile_size_overlap = [-1, -1]
    for i in range(len(bigger)):
        # make these into integers.
        # round up
        bigger[i] = int(bigger[i] + 1)
        tiles[i] = int((bigger[i] / maxdim[i]) + 1)
        tile_size[i] = tile_base_size[i] = int(bigger[i] / tiles[i])
        tiles_extra_1[i] = int(bigger[i] % tiles[i])
        # This is adding the extra pixel (remainder) to all of the tiles:
        if tiles_extra_1[i] > 0:
            tile_size[i] = tile_base_size[i] + 1
        tileset_size[i] = int(tile_size[i] * tiles[i])
        # Add overlap to tiles (does not affect tileset_size)
        tile_size_overlap[i] = tile_size[i] + int(options['overlap'])

    gcore.verbose("There will be %d by %d tiles each %d by %d cells" %
                  (tiles[0], tiles[1], tile_size[0], tile_size[1]))

    ximax = tiles[0]
    yimax = tiles[1]

    # source bbox extent and spans, used to interpolate tile edges below
    min_x = source_bbox['w']
    min_y = source_bbox['s']
    max_x = source_bbox['e']
    max_y = source_bbox['n']
    span_x = (max_x - min_x)
    span_y = (max_y - min_y)

    xi = 0
    tile_bbox = {'w': -1, 's': -1, 'e': -1, 'n': -1}

    # NOTE(review): the %-formatting here happens inside _(), so the count
    # is substituted before translation lookup
    if errors_dest > 0:
        gcore.warning(_("During computation %i tiles could not be created" %
                        errors_dest))

    # walk the tile grid column by column, row by row
    while xi < ximax:
        tile_bbox['w'] = float(
            min_x) + (float(xi) * float(tile_size[0]) / float(tileset_size[0])) * float(span_x)
        tile_bbox['e'] = float(min_x) + (float(xi + 1) * float(tile_size_overlap[0]
                                                               ) / float(tileset_size[0])) * float(span_x)
        yi = 0
        while yi < yimax:
            tile_bbox['s'] = float(
                min_y) + (float(yi) * float(tile_size[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox['n'] = float(min_y) + (
                float(yi + 1) * float(tile_size_overlap[1]) /
                float(tileset_size[1])) * float(span_y)
            tile_bbox_points = bboxToPoints(tile_bbox)
            tile_dest_bbox_points, errors = projectPoints(tile_bbox_points,
                                                          source=srs_source,
                                                          dest=srs_dest)
            tile_dest_bbox = pointsToBbox(tile_dest_bbox_points)
            # only emit tiles that actually cover part of the region
            if bboxesIntersect(tile_dest_bbox, dest_bbox):
                if flags['w']:
                    # WMS request fragment
                    print("bbox=%s,%s,%s,%s&width=%s&height=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                elif flags['g']:
                    # shell-style key=value output
                    print("w=%s;s=%s;e=%s;n=%s;cols=%s;rows=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                else:
                    # plain separator-delimited fields
                    print("%s%s%s%s%s%s%s%s%s%s%s" %
                          (tile_bbox['w'], fs, tile_bbox['s'], fs,
                           tile_bbox['e'], fs, tile_bbox['n'], fs,
                           tile_size_overlap[0], fs, tile_size_overlap[1]))
            yi += 1
        xi += 1
Exemple #44
0
def main():
    developments = options['development'].split(',')
    observed_popul_file = options['observed_population']
    projected_popul_file = options['projected_population']
    sep = gutils.separator(options['separator'])
    subregions = options['subregions']
    methods = options['method'].split(',')
    plot = options['plot']
    simulation_times = [
        float(each) for each in options['simulation_times'].split(',')
    ]

    for each in methods:
        if each in ('exp_approach', 'logarithmic2'):
            try:
                from scipy.optimize import curve_fit
            except ImportError:
                gcore.fatal(
                    _("Importing scipy failed. Method '{m}' is not available").
                    format(m=each))

    # exp approach needs at least 3 data points
    if len(developments) <= 2 and ('exp_approach' in methods
                                   or 'logarithmic2' in methods):
        gcore.fatal(_("Not enough data for method 'exp_approach'"))
    if len(developments) == 3 and ('exp_approach' in methods
                                   and 'logarithmic2' in methods):
        gcore.warning(
            _("Can't decide between 'exp_approach' and 'logarithmic2' methods"
              " because both methods can have exact solutions for 3 data points resulting in RMSE = 0"
              ))
    observed_popul = np.genfromtxt(observed_popul_file,
                                   dtype=float,
                                   delimiter=sep,
                                   names=True)
    projected_popul = np.genfromtxt(projected_popul_file,
                                    dtype=float,
                                    delimiter=sep,
                                    names=True)
    year_col = observed_popul.dtype.names[0]
    observed_times = observed_popul[year_col]
    year_col = projected_popul.dtype.names[0]
    projected_times = projected_popul[year_col]

    if len(developments) != len(observed_times):
        gcore.fatal(
            _("Number of development raster maps doesn't not correspond to the number of observed times"
              ))

    # gather developed cells in subregions
    gcore.info(_("Computing number of developed cells..."))
    table_developed = {}
    subregionIds = set()
    for i in range(len(observed_times)):
        gcore.percent(i, len(observed_times), 1)
        data = gcore.read_command('r.univar',
                                  flags='gt',
                                  zones=subregions,
                                  map=developments[i])
        for line in data.splitlines():
            stats = line.split('|')
            if stats[0] == 'zone':
                continue
            subregionId, developed_cells = stats[0], int(stats[12])
            subregionIds.add(subregionId)
            if i == 0:
                table_developed[subregionId] = []
            table_developed[subregionId].append(developed_cells)
        gcore.percent(1, 1, 1)
    subregionIds = sorted(list(subregionIds))
    # linear interpolation between population points
    population_for_simulated_times = {}
    for subregionId in table_developed.keys():
        population_for_simulated_times[subregionId] = np.interp(
            x=simulation_times,
            xp=np.append(observed_times, projected_times),
            fp=np.append(observed_popul[subregionId],
                         projected_popul[subregionId]))
    # regression
    demand = {}
    i = 0
    if plot:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        n_plots = np.ceil(np.sqrt(len(subregionIds)))
        fig = plt.figure(figsize=(5 * n_plots, 5 * n_plots))

    for subregionId in subregionIds:
        i += 1
        rmse = dict()
        predicted = dict()
        simulated = dict()
        coeff = dict()
        for method in methods:
            # observed population points for subregion
            reg_pop = observed_popul[subregionId]
            simulated[method] = np.array(
                population_for_simulated_times[subregionId])

            if method in ('exp_approach', 'logarithmic2'):
                # we have to scale it first
                y = np.array(table_developed[subregionId])
                magn = float(
                    np.power(
                        10,
                        max(magnitude(np.max(reg_pop)), magnitude(np.max(y)))))
                x = reg_pop / magn
                y = y / magn
                if method == 'exp_approach':
                    initial = (
                        0.5, np.mean(x), np.mean(y)
                    )  # this seems to work best for our data for exp_approach
                elif method == 'logarithmic2':
                    popt, pcov = curve_fit(logarithmic, x, y)
                    initial = (popt[0], popt[1], 0)
                with np.errstate(
                        invalid='warn'
                ):  # when 'raise' it stops every time on FloatingPointError
                    try:
                        popt, pcov = curve_fit(globals()[method],
                                               x,
                                               y,
                                               p0=initial)
                        if np.isnan(popt).any():
                            raise RuntimeError
                        # would result in nans in predicted
                        if method == 'logarithmic2' and np.any(
                                simulated[method] / magn <= popt[-1]):
                            raise RuntimeError
                    except (FloatingPointError, RuntimeError):
                        rmse[
                            method] = sys.maxsize  # so that other method is selected
                        gcore.warning(
                            _("Method '{m}' cannot converge for subregion {reg}"
                              .format(m=method, reg=subregionId)))
                        if len(methods) == 1:
                            gcore.fatal(
                                _("Method '{m}' failed for subregion {reg},"
                                  " please select at least one other method").
                                format(m=method, reg=subregionId))
                    else:
                        predicted[method] = globals()[method](
                            simulated[method] / magn, *popt) * magn
                        r = globals()[method](
                            x, *popt) * magn - table_developed[subregionId]
                        coeff[method] = popt
                        if len(reg_pop) > 3:
                            rmse[method] = np.sqrt(
                                (np.sum(r * r) / (len(reg_pop) - 3)))
                        else:
                            rmse[method] = 0
            else:
                if method == 'logarithmic':
                    reg_pop = np.log(reg_pop)
                if method == 'exponential':
                    y = np.log(table_developed[subregionId])
                else:
                    y = table_developed[subregionId]
                A = np.vstack((reg_pop, np.ones(len(reg_pop)))).T
                npversion = [int(x) for x in np.__version__.split('.')]
                if npversion >= [1, 14, 0]:
                    rcond = None
                else:
                    rcond = -1
                m, c = np.linalg.lstsq(A, y, rcond=rcond)[0]  # y = mx + c
                coeff[method] = m, c

                if method == 'logarithmic':
                    with np.errstate(invalid='ignore', divide='ignore'):
                        predicted[method] = np.where(
                            simulated[method] > 1,
                            np.log(simulated[method]) * m + c, 0)
                    predicted[method] = np.where(predicted[method] > 0,
                                                 predicted[method], 0)
                    r = (reg_pop * m + c) - table_developed[subregionId]
                elif method == 'exponential':
                    predicted[method] = np.exp(m * simulated[method] + c)
                    r = np.exp(m * reg_pop + c) - table_developed[subregionId]
                else:  # linear
                    predicted[method] = simulated[method] * m + c
                    r = (reg_pop * m + c) - table_developed[subregionId]
                # RMSE
                if len(reg_pop) > 2:
                    rmse[method] = np.sqrt(
                        (np.sum(r * r) / (len(reg_pop) - 2)))
                else:
                    rmse[method] = 0

        method = min(rmse, key=rmse.get)
        gcore.verbose(
            _("Method '{meth}' was selected for subregion {reg}").format(
                meth=method, reg=subregionId))
        # write demand
        demand[subregionId] = predicted[method]
        demand[subregionId] = np.diff(demand[subregionId])
        if np.any(demand[subregionId] < 0):
            gcore.warning(
                _("Subregion {sub} has negative numbers"
                  " of newly developed cells, changing to zero".format(
                      sub=subregionId)))
            demand[subregionId][demand[subregionId] < 0] = 0
        if coeff[method][0] < 0:
            # couldn't establish reliable population-area
            # project by number of developed pixels in analyzed period
            range_developed = table_developed[subregionId][
                -1] - table_developed[subregionId][0]
            range_times = observed_times[-1] - observed_times[0]
            dev_per_step = math.ceil(range_developed / float(range_times))
            # this assumes demand is projected yearly
            demand[subregionId].fill(dev_per_step if dev_per_step > 0 else 0)
            gcore.warning(
                _("For subregion {sub} population and development are inversely proportional,"
                  " demand will be interpolated based on prior change in development only."
                  .format(sub=subregionId)))

        # draw
        if plot:
            ax = fig.add_subplot(n_plots, n_plots, i)
            ax.set_title("{sid}, RMSE: {rmse:.3f}".format(sid=subregionId,
                                                          rmse=rmse[method]))
            ax.set_xlabel('population')
            ax.set_ylabel('developed cells')
            # plot known points
            x = np.array(observed_popul[subregionId])
            y = np.array(table_developed[subregionId])
            ax.plot(x, y, marker='o', linestyle='', markersize=8)
            # plot predicted curve
            x_pred = np.linspace(
                np.min(x),
                np.max(np.array(population_for_simulated_times[subregionId])),
                30)
            cf = coeff[method]
            if method == 'linear':
                line = x_pred * cf[0] + cf[1]
                label = "$y = {c:.3f} + {m:.3f} x$".format(m=cf[0], c=cf[1])
            elif method == 'logarithmic':
                line = np.log(x_pred) * cf[0] + cf[1]
                label = "$y = {c:.3f} + {m:.3f} \ln(x)$".format(m=cf[0],
                                                                c=cf[1])
            elif method == 'exponential':
                line = np.exp(x_pred * cf[0] + cf[1])
                label = "$y = {c:.3f} e^{{{m:.3f}x}}$".format(m=cf[0],
                                                              c=np.exp(cf[1]))
            elif method == 'exp_approach':
                line = exp_approach(x_pred / magn, *cf) * magn
                label = "$y = (1 -  e^{{-{A:.3f}(x-{B:.3f})}}) + {C:.3f}$".format(
                    A=cf[0], B=cf[1], C=cf[2])
            elif method == 'logarithmic2':
                line = logarithmic2(x_pred / magn, *cf) * magn
                label = "$y = {A:.3f} + {B:.3f} \ln(x-{C:.3f})$".format(
                    A=cf[0], B=cf[1], C=cf[2])

            ax.plot(x_pred, line, label=label)
            ax.plot(simulated[method],
                    predicted[method],
                    linestyle='',
                    marker='o',
                    markerfacecolor='None')
            plt.legend(loc=0)
            labels = ax.get_xticklabels()
            plt.setp(labels, rotation=30)
    if plot:
        plt.tight_layout()
        fig.savefig(plot)

    # write demand
    with open(options['demand'], 'w') as f:
        header = observed_popul.dtype.names  # the order is kept here
        header = [header[0]
                  ] + [sub for sub in header[1:] if sub in subregionIds]
        f.write(sep.join(header))
        f.write('\n')
        i = 0
        for time in simulation_times[1:]:
            f.write(str(int(time)))
            f.write(sep)
            # put 0 where there are more counties but are not in region
            for sub in header[1:]:  # to keep order of subregions
                f.write(str(int(demand[sub][i])))
                if sub != header[-1]:
                    f.write(sep)
            f.write('\n')
            i += 1
Exemple #45
0
        grass.fatal(_("No raster map components found"))
                    
    # copy projection info
    # (would prefer to use g.proj*, but this way is 5.3 and 5.7 compat)
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            shutil.copyfile(path, os.path.join(tmp_dir, 'PROJ_' + support))
    
    # pack it all up
    os.chdir(tmp)
    tar = tarfile.TarFile.open(name = outfile, mode = 'w:gz')
    tar.add(infile, recursive = True)
    tar.close()
    try:
        shutil.move(outfile, olddir)
    except shutil.Error, e:
        grass.fatal(e)
        
    os.chdir(olddir)
    
    grass.verbose(_("Raster map saved to '%s'" % \
                        os.path.join(olddir, outfile)))
    
if __name__ == "__main__":
    # Parse module options/flags first so that cleanup() and main() can
    # rely on the global 'options' and 'flags' dictionaries.
    options, flags = grass.parser()
    # Register cleanup after parsing succeeded, so it runs on any exit path.
    atexit.register(cleanup)
    sys.exit(main())
Exemple #46
0
def main():
    """Pack a native GRASS vector map into a tar archive.

    Reads module options/flags (set by grass.parser()):
      input  -- name of the vector map to pack
      output -- name of the pack file (defaults to <input>.pack)
      -c     -- disable gzip compression

    The archive contains the vector directory, an sqlite dump of any
    connected attribute tables and the location's PROJ files.
    """
    infile = options['input']
    compression_off = flags['c']

    global basedir
    basedir = grass.tempdir()

    # check if vector map exists
    gfile = grass.find_file(infile, element='vector')
    if not gfile['name']:
        grass.fatal(_("Vector map <%s> not found") % infile)

    # check if input vector map is in the native format
    if vector.vector_info(gfile['fullname'])['format'] != 'native':
        grass.fatal(
            _("Unable to pack vector map <%s>. Only native format supported.")
            % gfile['fullname'])

    # strip an optional mapset part from the name
    # (the original `if infile.find('@'):` was always true when '@' was
    # absent, because str.find() returns -1 which is truthy)
    if '@' in infile:
        infile = infile.split('@')[0]

    # output name
    if options['output']:
        outfile = options['output']
    else:
        outfile = infile + '.pack'

    # refuse to clobber an existing pack file unless --overwrite is set
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(
                _("Pack file <%s> already exists and will be overwritten") %
                outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))

    # prepare for packing
    grass.verbose(_("Packing <%s>...") % (gfile['fullname']))

    # write tar file with optional compression; make sure the archive is
    # closed even if adding a member fails
    tar = tarfile.open(name=outfile, mode='w:' if compression_off else 'w:gz')
    try:
        tar.add(gfile['file'], infile)

        # check if there is a db connection for the vector
        db_vect = vector.vector_db(gfile['fullname'])
        if not db_vect:
            grass.verbose(
                _('There is not database connected with vector map <%s>') %
                gfile['fullname'])
        else:
            # for each layer connection save a table in one sqlite database
            sqlitedb = os.path.join(basedir, 'db.sqlite')
            for _layer, dbconn in db_vect.items():
                grass.run_command('db.copy',
                                  from_driver=dbconn['driver'],
                                  from_database=dbconn['database'],
                                  from_table=dbconn['table'],
                                  to_driver='sqlite',
                                  to_database=sqlitedb,
                                  to_table=dbconn['table'])
            tar.add(sqlitedb, 'db.sqlite')

        # add the PROJ files so the projection can be checked when unpacking
        gisenv = grass.gisenv()
        for support in ['INFO', 'UNITS', 'EPSG']:
            path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                                'PERMANENT', 'PROJ_' + support)
            if os.path.exists(path):
                tar.add(path, 'PROJ_' + support)
    finally:
        tar.close()

    grass.message(
        _("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
Exemple #47
0
def main():
    """Import an ASCII x,y,z point cloud into a 3D raster map.

    Splits the current 3D region into horizontal depth slices, imports
    each slice with r.in.xyz (optionally several in parallel, controlled
    by the workers option or the WORKERS environment variable) and then
    assembles the slices into a 3D raster with r.to.rast3.
    """
    infile = options['input']
    output = options['output']
    method = options['method']
    dtype = options['type']
    fs = options['separator']
    x = options['x']
    y = options['y']
    z = options['z']
    value_column = options['value_column']
    vrange = options['vrange']
    vscale = options['vscale']
    percent = options['percent']
    pth = options['pth']
    trim = options['trim']
    workers = int(options['workers'])
    scan_only = flags['s']
    shell_style = flags['g']
    ignore_broken = flags['i']

    # honor WORKERS env var only when the option was left at its default;
    # (the original used `workers is 1`, which relies on CPython's small-int
    # caching -- use an equality test instead)
    if workers == 1 and "WORKERS" in os.environ:
        workers = int(os.environ["WORKERS"])

    if not os.path.exists(infile):
        grass.fatal(_("Unable to read input file <%s>") % infile)

    # optional parameters passed straight through to r.in.xyz
    addl_opts = {}
    if pth:
        addl_opts['pth'] = '%s' % pth
    if trim:
        addl_opts['trim'] = '%s' % trim
    if value_column:
        addl_opts['value_column'] = '%s' % value_column
    if vrange:
        addl_opts['vrange'] = '%s' % vrange
    if vscale:
        addl_opts['vscale'] = '%s' % vscale
    if ignore_broken:
        addl_opts['flags'] = 'i'

    # scan-only mode: delegate to r.in.xyz and exit
    if scan_only or shell_style:
        if shell_style:
            doShell = 'g'
        else:
            doShell = ''
        grass.run_command('r.in.xyz', flags='s' + doShell, input=infile,
                          output='dummy', sep=fs, x=x, y=y, z=z,
                          **addl_opts)
        sys.exit()

    if dtype == 'float':
        data_type = 'FCELL'
    else:
        data_type = 'DCELL'

    region = grass.region(region3d=True)

    if region['nsres'] != region['nsres3'] or region['ewres'] != region['ewres3']:
        grass.run_command('g.region', flags='3p')
        grass.fatal(_("The 2D and 3D region settings are different. Can not continue."))

    grass.verbose(_("Region bottom=%.15g  top=%.15g  vertical_cell_res=%.15g  (%d depths)")
                  % (region['b'], region['t'], region['tbres'], region['depths']))

    grass.verbose(_("Creating slices ..."))

    # To avoid a point which falls exactly on a slice's top bound from being
    # counted twice, shrink each slice's upper bound by eps.
    # For the top slice we keep the exact bound though, as someone scanning
    # the bounds may have set the region exactly to the data extent (a bad
    # idea, but it happens..)
    eps = 1.0e-15

    # if there are thousands of depths hopefully this dict doesn't get too
    # large and so we don't have to worry much about storing/looping through
    # all the finished process infos.
    proc = {}
    pout = {}

    depths = list(range(1, 1 + region['depths']))

    for i in depths:
        tmp_layer_name = 'tmp.r3xyz.%d.%s' % (os.getpid(), '%05d' % i)

        zrange_min = region['b'] + (region['tbres'] * (i - 1))

        if i < region['depths']:
            zrange_max = region['b'] + (region['tbres'] * i) - eps
        else:
            zrange_max = region['b'] + (region['tbres'] * i)

        # spawn depth layer import job in the background
        grass.message(_("Processing horizontal slice %d of %d [%.15g,%.15g) ...")
                      % (i, region['depths'], zrange_min, zrange_max))

        proc[i] = grass.start_command('r.in.xyz', input=infile, output=tmp_layer_name,
                                      sep=fs, method=method, x=x, y=y, z=z,
                                      percent=percent, type=data_type,
                                      zrange='%.15g,%.15g' % (zrange_min, zrange_max),
                                      **addl_opts)

        grass.debug("i=%d, %%=%d  (workers=%d)" % (i, i % workers, workers))

        if i % workers == 0:
            # wait for the ones launched so far to finish
            for p_i in depths[:i]:
                pout[p_i] = proc[p_i].communicate()[0]
                if proc[p_i].wait() != 0:
                    grass.fatal(_("Trouble importing data. Aborting."))

    # wait for jobs to finish, collect any stray output
    for i in depths:
        pout[i] = proc[i].communicate()[0]
        if proc[i].wait() != 0:
            grass.fatal(_("Trouble importing data. Aborting."))

    del proc

    grass.verbose(_("Assembling 3D cube ..."))

    # input order: lower most strata first
    slices = grass.read_command('g.list', type='raster', sep=',',
                                pattern='tmp.r3xyz.%d.*' % os.getpid()).rstrip(os.linesep)
    grass.debug(slices)

    # Report failure on error and success only after r.to.rast3 worked
    # (the pasted original printed the success message inside the except
    # branch, i.e. only when assembly FAILED).
    try:
        grass.run_command('r.to.rast3', input=slices, output=output)
    except CalledModuleError:
        grass.fatal(_("Unable to assemble 3D raster map from slices."))

    grass.message(_("Done. 3D raster map <%s> created.") % output)
Exemple #48
0
def main():
    """Pack a GRASS raster map (all its mapset elements plus the
    location's PROJ files) into a gzip-compressed tar archive.

    Reads module options/flags:
      input  -- name of the raster map (optionally name@mapset)
      output -- name of the pack file (defaults to <input>.pack)
      -c     -- disable gzip compression
    """
    infile = options['input']
    compression_off = flags['c']
    mapset = None
    if '@' in infile:
        infile, mapset = infile.split('@')

    if options['output']:
        outfile_path, outfile_base = os.path.split(os.path.abspath(options['output']))
    else:
        outfile_path, outfile_base = os.path.split(os.path.abspath(infile + ".pack"))

    outfile = os.path.join(outfile_path, outfile_base)

    global tmp
    tmp = grass.tempdir()
    tmp_dir = os.path.join(tmp, infile)
    os.mkdir(tmp_dir)
    grass.debug('tmp_dir = %s' % tmp_dir)

    gfile = grass.find_file(name=infile, element='cell', mapset=mapset)
    if not gfile['name']:
        grass.fatal(_("Raster map <%s> not found") % infile)

    # refuse to clobber an existing pack file unless --overwrite is set
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(_("Pack file <%s> already exists and will be overwritten") % outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <output>: <%s> exists.") % outfile)

    grass.message(_("Packing <%s> to <%s>...") % (gfile['fullname'], outfile))
    # mapset directory: two path components above the map's 'cell' element file
    basedir = os.path.sep.join(os.path.normpath(gfile['file']).split(os.path.sep)[:-2])
    olddir = os.getcwd()

    # copy raster map elements into the staging directory
    for element in ['cats', 'cell', 'cellhd', 'colr', 'fcell', 'hist']:
        path = os.path.join(basedir, element, infile)
        if os.path.exists(path):
            grass.debug('copying %s' % path)
            shutil.copyfile(path,
                            os.path.join(tmp_dir, element))

    if os.path.exists(os.path.join(basedir, 'cell_misc', infile)):
        shutil.copytree(os.path.join(basedir, 'cell_misc', infile),
                        os.path.join(tmp_dir, 'cell_misc'))

    if not os.listdir(tmp_dir):
        grass.fatal(_("No raster map components found"))

    # copy projection info
    # (would prefer to use g.proj*, but this way is 5.3 and 5.7 compat)
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS', 'EPSG']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            shutil.copyfile(path, os.path.join(tmp_dir, 'PROJ_' + support))

    # pack it all up; always restore the original working directory
    os.chdir(tmp)
    try:
        if compression_off:
            tar = tarfile.TarFile.open(name=outfile_base, mode='w:')
        else:
            tar = tarfile.TarFile.open(name=outfile_base, mode='w:gz')
        tar.add(infile, recursive=True)
        tar.close()
        try:
            shutil.move(outfile_base, outfile)
        except shutil.Error as e:
            grass.fatal(e)
    finally:
        os.chdir(olddir)

    # format OUTSIDE _() so the literal message stays translatable
    # (the original applied % inside the _() call, defeating the lookup)
    grass.verbose(_("Raster map saved to '%s'") % outfile)
Exemple #49
0
    if len(sys.argv) >= 6:
        try:
            monSize[0] = int(sys.argv[5])
        except ValueError:
            pass
    
    if len(sys.argv) == 7:
        try:
            monSize[1] = int(sys.argv[6])
        except ValueError:
            pass

    import gettext
    gettext.install('grasswxpy', os.path.join(os.getenv("GISBASE"), 'locale'), unicode = True)
    
    grass.verbose(_("Starting map display <%s>...") % (monName))

    RunCommand('g.gisenv',
               set = 'MONITOR_%s_PID=%d' % (monName, os.getpid()))
    
    gmMap = MapApp(0)
    # set title
    gmMap.mapFrm.SetTitle(_("GRASS GIS Map Display: " +
                            monName + 
                            " - Location: " + grass.gisenv()["LOCATION_NAME"]))
    
    gmMap.MainLoop()
    
    grass.verbose(_("Stopping map display <%s>...") % (monName))

    # clean up GRASS env variables
Exemple #50
0
def main():
    """Compute the Optimum Index Factor (OIF) for every combination of
    the input bands and print/write the ranked results.

    Reads module options/flags:
      input  -- comma-separated list of at least four raster bands
      output -- result file ('-' or empty writes to stdout)
      -g     -- shell-style output format
      -s     -- compute band statistics serially instead of in parallel
    """
    shell = flags['g']
    serial = flags['s']
    bands = options['input'].split(',')

    if len(bands) < 4:
        grass.fatal(_("At least four input maps required"))

    output = options['output']
    # calculate the Stddev for TM bands
    grass.message(_("Calculating standard deviations for all bands..."))
    stddev = {}

    if serial:
        for band in bands:
            # band names are strings; the original '%d' raised TypeError
            grass.verbose("band %s" % band)
            s = grass.read_command('r.univar', flags='g', map=band)
            kv = parse_key_val(s)
            stddev[band] = float(kv['stddev'])
    else:
        # run all bands in parallel
        if "WORKERS" in os.environ:
            workers = int(os.environ["WORKERS"])
        else:
            workers = len(bands)
        # guard against WORKERS=0, which would divide by zero below
        workers = max(1, workers)
        proc = {}
        pout = {}

        # spawn jobs in the background
        n = 0
        for band in bands:
            proc[band] = grass.pipe_command('r.univar', flags='g', map=band)
            # '==' instead of the original 'is' identity test on an int
            if n % workers == 0:
                # wait for the ones launched so far to finish
                for bandp in bands[:n]:
                    if not proc[bandp].stdout.closed:
                        pout[bandp] = proc[bandp].communicate()[0]
                    proc[bandp].wait()
            n = n + 1

        # wait for jobs to finish, collect the output
        for band in bands:
            if not proc[band].stdout.closed:
                pout[band] = proc[band].communicate()[0]
            proc[band].wait()

        # parse the results
        for band in bands:
            kv = parse_key_val(pout[band])
            stddev[band] = float(kv['stddev'])

    grass.message(_("Calculating Correlation Matrix..."))
    correlation = {}
    s = grass.read_command('r.covar', flags='r', map=bands, quiet=True)

    # We need to skip the first line, since r.covar prints the number of values
    lines = s.splitlines()
    for i, row in zip(bands, lines[1:]):
        for j, cell in zip(bands, row.split(' ')):
            correlation[i, j] = float(cell)

    # Calculate all combinations
    grass.message(_("Calculating OIF for all band combinations..."))

    oif = [(oifcalc(stddev, correlation, *p), p) for p in perms(bands)]
    oif.sort(reverse=True)

    grass.verbose(
        _("The Optimum Index Factor analysis result "
          "(best combination shown first):"))

    if shell:
        fmt = "%s,%s,%s:%.4f\n"
    else:
        fmt = "%s, %s, %s:  %.4f\n"

    if not output or output == '-':
        for v, p in oif:
            sys.stdout.write(fmt % (p + (v, )))
    else:
        # open() instead of the Python-2-only file() builtin; the context
        # manager also guarantees the file is closed on error
        with open(output, 'w') as outf:
            for v, p in oif:
                outf.write(fmt % (p + (v, )))
Exemple #51
0
    def warp_import(self, file, map):
        """Warp raster file using gdalwarp and import wrapped file
        into GRASS.

        :param file: path of the source raster file to warp
        :param map: name of the GRASS raster map to create
        """
        warpfile = self.tmp + 'warped.geotiff'
        tmpmapname = map + '_tmp'

        # target SRS: current location's projection in proj4 form ('jf' flags)
        t_srs = grass.read_command('g.proj',
                                   quiet = True,
                                   flags = 'jf').rstrip('\n')
        if not t_srs:
            grass.fatal(_('g.proj failed'))
        
        grass.debug("gdalwarp -s_srs '%s' -t_srs '%s' -r %s %s %s %s" % \
                        (self.options['srs'], t_srs,
                         self.options['method'], self.options['warpoptions'],
                         file, warpfile))
        grass.verbose("Warping input file '%s'..." % os.path.basename(file))
        if self.options['warpoptions']:
            # NOTE(review): the whole warpoptions string is passed as a single
            # argv element, so several space-separated gdalwarp options would
            # NOT be split into separate arguments -- confirm intended usage
            ps = subprocess.Popen(['gdalwarp',
                                   '-s_srs', '%s' % self.options['srs'],
                                   '-t_srs', '%s' % t_srs,
                                   '-r', self.options['method'],
                                   self.options['warpoptions'],
                                   file, warpfile])
        else:
            ps = subprocess.Popen(['gdalwarp',
                                   '-s_srs', '%s' % self.options['srs'],
                                   '-t_srs', '%s' % t_srs,
                                   '-r', self.options['method'],
                                   file, warpfile])
            
        ps.wait()
        if ps.returncode != 0 or \
                not os.path.exists(warpfile):
            grass.fatal(_('gdalwarp failed'))
    
        # import it into a temporary map
        grass.info(_('Importing raster map...'))
        if grass.run_command('r.in.gdal',
                             quiet = True,
                             flags = self.gdal_flags,
                             input = warpfile,
                             output = tmpmapname) != 0:
            grass.fatal(_('r.in.gdal failed'))
        
        os.remove(warpfile)

        # get list of channels produced by the import
        pattern = tmpmapname + '*'
        grass.debug('Pattern: %s' % pattern)
        mapset = grass.gisenv()['MAPSET']
        channel_list = grass.mlist_grouped(type = 'rast', pattern = pattern, mapset = mapset)[mapset]
        grass.debug('Channel list: %s' % ','.join(channel_list))
        
        if len(channel_list) < 2: # test for single band data
            self.channel_suffixes = []
        else:
            # NOTE(review): these are full map names, not suffixes -- verify
            self.channel_suffixes = channel_list # ???
        
        grass.debug('Channel suffixes: %s' % ','.join(self.channel_suffixes))
        
        # add to the list of all suffixes
        self.suffixes = self.suffixes + self.channel_suffixes
        self.suffixes.sort()
        
        # get last suffix
        if len(self.channel_suffixes) > 0:
            last_suffix = self.channel_suffixes[-1]
        else:
            last_suffix = ''

        # find the alpha layer
        if self.flags['k']:
            alphalayer = tmpmapname + last_suffix
        else:
            alphalayer = tmpmapname + '.alpha'
        
        # test to see if the alpha map exists
        if not grass.find_file(element = 'cell', name = alphalayer)['name']:
            alphalayer = ''
        
        # calculate the new maps:
        for suffix in self.channel_suffixes:
            grass.debug("alpha=%s MAPsfx=%s%s tmpname=%s%s" % \
                            (alphalayer, map, suffix, tmpmapname, suffix))
            if alphalayer:
                # Use alpha channel for nulls: problem: I've seen a map
                # where alpha was 1-255; 1 being transparent. what to do?
                # (Geosci Australia Gold layer, format=tiff)
                # NOTE(review): 'sfx' is undefined in this method, so this
                # branch raises NameError when executed -- 'suffix' was almost
                # certainly intended; confirm and fix
                if grass.run_command('r.mapcalc',
                                     quiet = True,
                                     expression = "%s%s = if(%s, %s%s, null())" % \
                                         (map, sfx, alphalayer, tmpmapname, sfx)) != 0:
                    grass.fatal(_('r.mapcalc failed'))
            else:
                if grass.run_command('g.copy',
                                     quiet = True,
                                     rast = "%s%s,%s%s" % \
                                         (tmpmapname, suffix, map, suffix)) != 0:
                    grass.fatal(_('g.copy failed'))
        
            # copy the color tables
            # NOTE(review): the error message below says 'g.copy' but the
            # command actually run is r.colors
            if grass.run_command('r.colors',
                                 quiet = True,
                                 map = map + suffix,
                                 rast = tmpmapname + suffix) != 0:
                grass.fatal(_('g.copy failed'))

            # make patch lists
            suffix = suffix.replace('.', '_')
            # this is a hack to make the patch lists empty:
            if self.tiler == 0:
                self.patches = []
            # NOTE(review): list.append() returns None, so this assignment
            # sets self.patches to None on every iteration; a bare
            # self.patches.append(...) was almost certainly intended
            self.patches = self.patches.append(map + suffix)
    
        # if no suffix, processing is simple (e.g. elevation has only 1
        # band)
        if len(channel_list) < 2:
            # run r.mapcalc to crop to region
            if grass.run_command('r.mapcalc',
                                 quiet = True,
                                 expression = "%s = %s" % \
                                     (map, tmpmapname)) != 0:
                grass.fatal(_('r.mapcalc failed'))
            
            if grass.run_command('r.colors',
                                 quiet = True,
                                 map = map,
                                 rast = tmpmapname) != 0:
                grass.fatal(_('r.colors failed'))
    
        # remove the old temporary channels
        if grass.run_command('g.remove',
                             quiet = True,
                             rast = ','.join(channel_list)) != 0:
            grass.fatal(_('g.remove failed'))