Example #1
def main():
    input = options['input']
    layer = options['layer']
    type = options['type']
    olayer = options['olayer']
    host = options['host']
    port = options['port']
    database = options['database']
    #schema = options['schema']
    user = options['user']
    password = options['password']

    # Construct dsn string
    dsn = "PG:dbname=" + database
    if host:
        dsn += " host=" + host
    if port:
        dsn += " port=" + port
    if user:
        dsn += " user="******" password="******"PostgreSQL", dsn=dsn, olayer=olayer) != 0:
        grass.fatal("Cannot export vector to database.")
Example #2
def install_extension():
    gisbase = os.getenv('GISBASE')
    if not gisbase:
        grass.fatal(_('$GISBASE not defined'))
    
    if options['extension'] in get_installed_extensions(force = True):
        grass.warning(_("Extension <%s> already installed. Re-installing...") % options['extension'])
    
    if sys.platform == "win32":
        ret = install_extension_win()
    else:
        ret = install_extension_other()
    
    if ret != 0:
        grass.warning(_('Installation failed, sorry. Please check above error messages.'))
    else:
        grass.message(_("Updating metadata file..."))
        install_extension_xml()
        grass.message(_("Installation of <%s> successfully finished") % options['extension'])
    
    # cleanup build cruft
    if not flags['s']:
        tidy_citizen()
    
    if not os.environ.has_key('GRASS_ADDON_PATH') or \
            not os.environ['GRASS_ADDON_PATH']:
        grass.warning(_('This add-on module will not function until you set the '
                        'GRASS_ADDON_PATH environment variable (see "g.manual variables")'))
Example #3
def main():
    input = options['input']
    output = options['output']
    stddeviation = options['stddeviation']

    if grass.run_command('v.kernel', input=input, stddeviation=stddeviation, output=output) != 0:
        grass.fatal("Cannot run v.kernel.")
Example #4
def install_extension_win(name):
    ### do not use hardcoded url - http://wingrass.fsv.cvut.cz/grassXX/addonsX.X.X
    grass.message(_("Downloading precompiled GRASS Addons <%s>...") % options['extension'])
    url = "http://wingrass.fsv.cvut.cz/grass%(major)s%(minor)s/addons/grass-%(major)s.%(minor)s.%(patch)s/" % \
        { 'major' : version[0], 'minor' : version[1], 'patch' : version[2]}
    
    grass.debug("url=%s" % url, 1)

    try:
        f = urlopen(url + '/' + name + '.zip', proxies=PROXIES)

        # create addons dir if not exists
        if not os.path.exists(options['prefix']):
            os.mkdir(options['prefix'])

        # download data
        fo = tempfile.TemporaryFile()
        fo.write(f.read())
        zfobj = zipfile.ZipFile(fo)
        for name in zfobj.namelist():
            if name.endswith('/'):
                d = os.path.join(options['prefix'], name)
                if not os.path.exists(d):
                    os.mkdir(d)
            else:
                outfile = open(os.path.join(options['prefix'], name), 'wb')
                outfile.write(zfobj.read(name))
                outfile.close()

        fo.close()
    except HTTPError:
        grass.fatal(_("GRASS Addons <%s> not found") % name)

    return 0
Example #5
def test_spatial_extent_intersection():
    # Generate the extents

    A = SpatialExtent(
        north=80, south=20, east=60, west=10, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(
        north=80, south=20, east=60, west=10, bottom=-50, top=50)
    B.print_info()
    C = A.intersect(B)
    C.print_info()

    if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
        C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
        C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
        core.fatal("Wrong intersection computation")

    B = SpatialExtent(
        north=40, south=30, east=60, west=10, bottom=-50, top=50)
    B.print_info()
    C = A.intersect(B)
    C.print_info()

    if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
       C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
       C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
        core.fatal("Wrong intersection computation")

    B = SpatialExtent(
        north=40, south=30, east=60, west=30, bottom=-50, top=50)
    B.print_info()
    C = A.intersect(B)
    C.print_info()

    if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
       C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
       C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
        core.fatal("Wrong intersection computation")

    B = SpatialExtent(
        north=40, south=30, east=60, west=30, bottom=-30, top=50)
    B.print_info()
    C = A.intersect(B)
    C.print_info()

    if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
       C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
       C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
        core.fatal("Wrong intersection computation")

    B = SpatialExtent(
        north=40, south=30, east=60, west=30, bottom=-30, top=30)
    B.print_info()
    C = A.intersect(B)
    C.print_info()

    if C.get_north() != B.get_north() or C.get_south() != B.get_south() or \
       C.get_west() != B.get_west() or C.get_east() != B.get_east() or \
       C.get_bottom() != B.get_bottom() or C.get_top() != B.get_top():
        core.fatal("Wrong intersection computation")
Example #6
def project(file, source, dest):
    """Projects point (x, y) using projector"""
    errors = 0
    points = []
    try:
        ret = gcore.read_command('m.proj',
                                 quiet=True,
                                 flags='d',
                                 proj_in=source['proj'],
                                 proj_out=dest['proj'],
                                 sep=';',
                                 input=file)
    except CalledModuleError:
        gcore.fatal(cs2cs + ' failed')

    if not ret:
        gcore.fatal(cs2cs + ' failed')

    for line in ret.splitlines():
        if "*" in line:
            errors += 1
        else:
            p_x2, p_y2, p_z2 = list(map(float, line.split(';')))
            points.append((p_x2 / dest['scale'], p_y2 / dest['scale']))

    return points, errors
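As a minimal sketch of the per-line parsing above, with a hypothetical m.proj output line and destination scale (only the ';' separator and the division by dest['scale'] come from the function):

line = "5000.0;3000.0;0.0"   # hypothetical 'x;y;z' line from m.proj
dest = {'scale': 1000.0}     # hypothetical destination scale factor
p_x2, p_y2, p_z2 = list(map(float, line.split(';')))
point = (p_x2 / dest['scale'], p_y2 / dest['scale'])
# point == (5.0, 3.0)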
Example #7
    def nowarp_import(self, file, map):
        """Import raster file into GRASS"""
        if grass.run_command('r.in.gdal',
                             quiet = True,
                             flags = 'o' + self.gdal_flags,
                             input = file,
                             output = map) != 0:
            grass.fatal(_('r.in.gdal failed'))

        # get a list of channels:
        pattern = map + '*'
        grass.debug("pattern: %s" % ','.join(pattern))
        mapset = grass.gisenv()['MAPSET']
        channel_list = grass.mlist_grouped(type = 'rast', pattern = pattern, mapset = mapset)
        grass.debug("channel list: %s" % ','.join(channel_list))

        if len(channel_list) < 2:
            # test for single band data
            self.channel_suffixes = []
        else:
            self.channel_suffixes = channel_list # ???
    
        # add to the list of all suffixes:
        self.suffixes = self.suffixes + self.channel_suffixes
        self.suffixes.sort()
    
        for suffix in self.channel_suffixes:
            # make patch lists
            suffix = suffix.replace('.', '_')
            # this is a hack to make the patch lists empty
            if self.tiler == 0:
                self.patches = []
            self.patches.append(map + suffix)
Example #8
def main():
    options, flags = gcore.parser()
    if options["input"]:
        map_name = gcore.find_file(name=options["input"], element="cell")["fullname"]
        if not map_name:
            gcore.fatal(_("Raster map <{raster}> not found").format(raster=options["input"]))

    # define display driver (avoid 'no graphics device selected' error at start up)
    driver = UserSettings.Get(group="display", key="driver", subkey="type")
    if driver == "png":
        os.environ["GRASS_RENDER_IMMEDIATE"] = "png"
    else:
        os.environ["GRASS_RENDER_IMMEDIATE"] = "cairo"

    # launch application
    app = wx.App()
    if not CheckWxVersion([2, 9]):
        wx.InitAllImageHandlers()

    # show main frame
    giface = StandaloneGrassInterface()
    frame = ExampleMapFrame(parent=None, giface=giface)
    if options["input"]:
        giface.WriteLog(_("Loading raster map <{raster}>...").format(raster=map_name))
        frame.SetLayer(map_name)

    frame.Show()
    app.MainLoop()
Example #9
def install_extension_win():
    ### TODO: do not use hardcoded url - http://wingrass.fsv.cvut.cz/grassXX/addonsX.X.X
    version = grass.version()['version'].split('.')
    grass.message(_("Downloading precompiled GRASS Addons <%s>...") % options['extension'])
    url = "http://wingrass.fsv.cvut.cz/grass%s%s/addons" % (version[0], version[1])
    if version[0] == '6' and version[1] == '4':
        url += '/grass-%s.%s.%s' % (version[0], version[1], version[2])
    grass.debug("url=%s" % url, 1)
    
    try:
        f = urlopen(url + '/' + options['extension'] + '.zip')
        
        # create addons dir if not exists
        if not os.path.exists(options['prefix']):
            os.mkdir(options['prefix'])
        
        # download data
        fo = tempfile.TemporaryFile()
        fo.write(f.read())
        zfobj = zipfile.ZipFile(fo)
        for name in zfobj.namelist():
            if name.endswith('/'):
                d = os.path.join(options['prefix'], name)
                if not os.path.exists(d):
                    os.mkdir(d)
            else:
                outfile = open(os.path.join(options['prefix'], name), 'wb')
                outfile.write(zfobj.read(name))
                outfile.close()
        
        fo.close()
    except HTTPError:
        grass.fatal(_("GRASS Addons <%s> not found") % options['extension'])
    
    return 0
Example #10
def create_frame(monitor, frame, at, overwrite=False):
    lines = read_monitor_file(monitor)
    # get width and height of the monitor
    width = height = -1
    for line in lines:
        try:
            if 'WIDTH' in line:
                width = int(line.split('=', 1)[1].rsplit(' ', 1)[0])
            elif 'HEIGHT' in line:
                height = int(line.split('=', 1)[1].rsplit(' ', 1)[0])
        except:
            pass

    if width < 0 or height < 0:
        fatal(_("Invalid monitor size: %dx%d") % (width, height))

    if not overwrite:
        lines.append(calculate_frame(frame, at, width, height))
    else:
        for idx in range(len(lines)):
            line = lines[idx]
            if 'FRAME' not in line:
                continue
            if get_frame_name(line) == frame:
                lines[idx] = calculate_frame(frame, at, width, height)

    write_monitor_file(monitor, lines)
Example #11
def import_files(directory, pattern):
    maps = []
    if pattern:
        from glob import glob
        files = glob('{dir}{sep}{pat}'.format(
            dir=directory, sep=os.path.sep, pat=pattern)
        )
    else:
        files = map(lambda x: os.path.join(directory, x),
                    os.listdir(directory)
        )

    start = time.time()

    import_module = Module('v.in.ascii', separator='space', z=3, flags='tbz',
                           overwrite=overwrite(), quiet=True, run_=False)
    try:
        for f in files:
            basename = os.path.basename(f)
            mapname = os.path.splitext(basename)[0]
            maps.append(mapname)
            message("Importing <{}>...".format(f))
            import_task = deepcopy(import_module)
            queue.put(import_task(input=f, output=mapname))
        queue.wait()
    except CalledModuleError:
        return sys.exit(1)

    if not maps:
        fatal("No input files found")

    message("Import finished in {:.0f} sec".format(time.time() - start))

    return maps
Example #12
def create_dir(path):
    if os.path.isdir(path):
        return

    try:
        os.makedirs(path)
    except OSError as e:
        grass.fatal(_("Unable to create '%s': %s") % (path, e))
Example #13
def start_man(entry):
    path = os.path.join(gisbase, 'docs', 'man', 'man1', entry + '.1')
    if not os.path.exists(path) and os.getenv('GRASS_ADDON_BASE'):
        path = os.path.join(os.getenv('GRASS_ADDON_BASE'), 'docs', 'man', 'man1', entry + '.1')
    
    for ext in ['', '.gz', '.bz2']:
        if os.path.exists(path + ext):
            os.execlp('man', 'man', path + ext)
            grass.fatal(_("Error starting 'man' for '%s'") % path)
    grass.fatal(_("No manual page entry for '%s'") % entry)
Example #14
def write_monitor_file(monitor, lines, ftype='env'):
    mfile = check_monitor_file(monitor, ftype)

    try:
        fd = open(mfile, 'w')
    except IOError as e:
        fatal(_("Unable to get monitor info. %s"), e)

    fd.writelines(lines)
    fd.close()
Example #15
def start_man(entry):
    path = os.path.join(gisbase, "man", "man1", entry + ".1")
    if not os.path.exists(path) and os.getenv("GRASS_ADDON_PATH"):
        path = os.path.join(os.getenv("GRASS_ADDON_PATH"), "man", "man1", entry + ".1")

    for ext in ["", ".gz", ".bz2"]:
        if os.path.exists(path + ext):
            os.execlp("man", "man", path + ext)
            grass.fatal(_("Error starting 'man' for <%s>") % path)
    grass.fatal(_("No manual page entry for <%s>") % entry)
Example #16
def main():
    # check dependencies
    if sys.platform != "win32":
        check_progs()
    
    # define path
    if flags['s']:
        options['prefix'] = os.environ['GISBASE']
    if options['prefix'] == '$GRASS_ADDON_PATH':
        if not os.environ.has_key('GRASS_ADDON_PATH') or \
                not os.environ['GRASS_ADDON_PATH']:
            major_version = int(grass.version()['version'].split('.', 1)[0])
            grass.warning(_("GRASS_ADDON_PATH is not defined, "
                            "installing to ~/.grass%d/addons/") % major_version)
            options['prefix'] = os.path.join(os.environ['HOME'], '.grass%d' % major_version, 'addons')
        else:
            path_list = os.environ['GRASS_ADDON_PATH'].split(os.pathsep)
            if len(path_list) < 1:
                grass.fatal(_("Invalid GRASS_ADDON_PATH value - '%s'") % os.environ['GRASS_ADDON_PATH'])
            if len(path_list) > 1:
                grass.warning(_("GRASS_ADDON_PATH has more items, using first defined - '%s'") % path_list[0])
            options['prefix'] = path_list[0]
    
    # list available modules
    if flags['l'] or flags['c'] or flags['g']:
        list_available_extensions()
        return 0
    elif flags['a']:
        elist = get_installed_extensions()
        if elist:
            grass.message(_("List of installed extensions:"))
            sys.stdout.write('\n'.join(elist))
            sys.stdout.write('\n')
        else:
            grass.info(_("No extension installed"))
        return 0
    else:
        if not options['extension']:
            grass.fatal(_('You need to define an extension name or use -l'))

    if flags['d']:
        if options['operation'] != 'add':
            grass.warning(_("Flag 'd' is relevant only to 'operation=add'. Ignoring this flag."))
        else:
            global remove_tmpdir
            remove_tmpdir = False
    
    if options['operation'] == 'add':
        check_dirs()
        install_extension()
    else: # remove
        remove_extension(flags['f'])
    
    return 0
Example #17
def check_style_files(fil):
    dist_file   = os.path.join(os.getenv('GISBASE'), 'docs', 'html', fil)
    addons_file = os.path.join(options['prefix'], 'docs', 'html', fil)
    
    if os.path.isfile(addons_file):
        return

    try:
        shutil.copyfile(dist_file, addons_file)
    except OSError as e:
        grass.fatal(_("Unable to create '%s': %s") % (addons_file, e))
Example #18
def main():
    options, unused = gcore.parser()

    drape_map = options['color']
    relief_map = options['shade']
    brighten = options['brighten']

    try:
        gcore.run_command('d.his', hue=drape_map, intensity=relief_map,
                          brighten=brighten)
    except CalledModuleError:
        gcore.fatal(_("Module %s failed. Check the above error messages.") % 'd.his')
Example #19
def test_increment_datetime_by_string():

    dt = datetime(2001, 9, 1, 0, 0, 0)
    string = "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"

    dt1 = datetime(2003, 2, 18, 12, 5, 0)
    dt2 = increment_datetime_by_string(dt, string)

    delta = dt1 - dt2

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("increment computation is wrong")
Example #20
def remove_extension(force = False):
    # try to read XML metadata file first
    fXML = os.path.join(options['prefix'], 'modules.xml')
    name = options['extension']
    if name not in get_installed_extensions():
        grass.warning(_("Extension <%s> not found") % name)
    
    if force:
        grass.verbose(_("List of removed files:"))
    else:
        grass.info(_("Files to be removed (use flag 'f' to force removal):"))
    
    if os.path.exists(fXML):
        f = open(fXML, 'r')
        tree = etree.fromstring(f.read())
        flist = []
        for task in tree.findall('task'):
            if name == task.get('name', default = '') and \
                    task.find('binary') is not None:
                for f in task.find('binary').findall('file'):
                    flist.append(f.text)
        
        if flist:
            removed = False
            err = list()
            for fpath in flist:
                try:
                    if force:
                        grass.verbose(fpath)
                        os.remove(fpath)
                        removed = True
                    else:
                        print fpath
                except OSError:
                    err.append((_("Unable to remove file '%s'") % fpath))
            if force and not removed:
                grass.fatal(_("Extension <%s> not found") % options['extension'])
            
            if err:
                for e in err:
                    grass.error(e)
        else:
            remove_extension_std(force)
    else:
        remove_extension_std(force)

    if force:
        grass.message(_("Updating metadata file..."))
        remove_extension_xml()
        grass.message(_("Extension <%s> successfully uninstalled.") % options['extension'])
    else:
        grass.warning(_("Extension <%s> not removed.\n"
                        "Re-run '%s' with 'f' flag to force removal") % (options['extension'], 'g.extension'))
Example #21
def main():
    options, flags = gcore.parser()
    gisenv = gcore.gisenv()
    if 'MONITOR' in gisenv:
        cmd_file = gcore.parse_command('d.mon', flags='g')['cmd']
        dout_cmd = 'd.out.file'
        for param, val in options.items():
            if val:
                dout_cmd += " {param}={val}".format(param=param, val=val)
        with open(cmd_file, "a") as file_:
            file_.write(dout_cmd)
    else:
        gcore.fatal(_("No graphics device selected. Use d.mon to select graphics device."))
Example #22
def remove_extension():
    # is module available?
    bin_dir = os.path.join(options['prefix'], 'bin', options['extension'])
    scr_dir = os.path.join(options['prefix'], 'scripts', options['extension'])
    if not os.path.exists(bin_dir) and not os.path.exists(scr_dir):
        grass.fatal(_("Module <%s> not found") % options['extension'])
    
    for f in [bin_dir, scr_dir,
              os.path.join(options['prefix'], 'docs', 'html', options['extension'] + '.html'),
              os.path.join(options['prefix'], 'man', 'man1', options['extension'] + '.1')]:
        grass.try_remove(f)
    
    grass.message(_("Module <%s> successfully uninstalled") % options['extension'])
Example #23
def main():
    options, flags = gcore.parser()
    gisenv = gcore.gisenv()
    if "MONITOR" in gisenv:
        cmd_file = gisenv["MONITOR_{monitor}_CMDFILE".format(monitor=gisenv["MONITOR"].upper())]
        dout_cmd = "d.what.vect"
        for param, val in options.iteritems():
            if val:
                dout_cmd += " {param}={val}".format(param=param, val=val)
        with open(cmd_file, "a") as file_:
            file_.write(dout_cmd)
    else:
        gcore.fatal(_("No graphics device selected. Use d.mon to select graphics device."))
Example #24
def calculate_frame(frame, at, width, height):
    try:
        b, t, l, r = map(float, at.split(','))
    except:
        fatal(_("Invalid frame position: %s") % at)

    top = height - (t / 100. * height)
    bottom = height - (b / 100. * height)
    left = l / 100. * width
    right = r / 100. * width
    
    return 'GRASS_RENDER_FRAME=%d,%d,%d,%d # %s%s' % \
        (top, bottom, left, right, frame, '\n')
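As a quick check of the percent-to-pixel conversion above, a hypothetical call placing a frame over the left half of an 800x600 monitor (assumes calculate_frame as defined above is in scope):

print(calculate_frame('left_half', '0,100,0,50', width=800, height=600))
# GRASS_RENDER_FRAME=0,600,0,400 # left_half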
Example #25
def main():
    env = grass.gisenv()
    mon = env.get('MONITOR', None)
    if not mon:
        grass.fatal(_("No monitor selected. Run `d.mon` to select monitor."))
    
    monCmd = env.get('MONITOR_%s_CMDFILE' % mon.upper())
    if not monCmd:
        grass.fatal(_("No cmd file found for monitor <%s>") % mon)

    try:
        fd = open(monCmd, 'r')
        cmdList = fd.readlines()
        
        grass.run_command('d.erase')
        
        for cmd in cmdList:
            grass.call(split(cmd))
    except IOError as e:
        grass.fatal(_("Unable to open file '%s' for reading. Details: %s") % \
                        (monCmd, e))
    
    fd.close()
    
    # restore cmd file
    try:
        fd = open(monCmd, "w")
        fd.writelines(cmdList)
    except IOError as e:
        grass.fatal(_("Unable to open file '%s' for writing. Details: %s") % \
                        (monCmd, e))
    
    return 0
Example #26
def remove_modules(mlist, force = False):
    # try to read XML metadata file first
    fXML = os.path.join(options['prefix'], 'modules.xml')
    installed = get_installed_modules()

    if os.path.exists(fXML):
        f = open(fXML, 'r')
        tree = etree.fromstring(f.read())
        f.close()
    else:
        tree = None

    for name in mlist:
        if name not in installed:
            # try even if module does not seem to be available,
            # as the user may be trying to get rid of left over cruft
            grass.warning(_("Extension <%s> not found") % name)

        if tree is not None:
            flist = []
            for task in tree.findall('task'):
                if name == task.get('name') and \
                        task.find('binary') is not None:
                    for f in task.find('binary').findall('file'):
                        flist.append(f.text)
                    break

            if flist:
                removed = False
                err = list()
                for fpath in flist:
                    try:
                        if force:
                            grass.verbose(fpath)
                            removed = True
                            os.remove(fpath)
                        else:
                            print fpath
                    except OSError:
                        err.append((_("Unable to remove file '%s'") % fpath))
                if force and not removed:
                    grass.fatal(_("Extension <%s> not found") % name)

                if err:
                    for e in err:
                        grass.error(e)
            else:
                remove_extension_std(name, force)
        else:
            remove_extension_std(name, force)
Example #27
def read_monitor_file(monitor, ftype='env'):
    mfile = check_monitor_file(monitor, ftype)
    try:
        fd = open(mfile, 'r')
    except IOError as e:
        fatal(_("Unable to get monitor info. %s"), e)

    lines = []
    for line in fd.readlines():
        lines.append(line)

    fd.close()

    return lines
Example #28
    def GetData(self, idx, server, query, output):
        """Download data"""
        grass.message(_("Downloading data (tile %d)...") % idx)
        grass.verbose("Requesting data: %s" % self.options['mapserver'])

        if not self.flags['g']: # -> post
            try:
                urllib.urlretrieve(server, output, data = query)
            except IOError:
                grass.fatal(_("Failed while downloading the data"))
            
            if not os.path.exists(output):
                grass.fatal(_("Failed while downloading the data"))
            
            # work-around for brain-dead ArcIMS servers which want POST-data as part of the GET URL
            #   (this is technically allowed by OGC WMS def v1.3.0 Sec6.3.4)
            if os.path.getsize(output) == 0:
                grass.warning(_("Downloaded image file is empty -- trying another method"))
                self.flags['g'] = True
            
        if self.flags['g']: # -> get
            try:
                urllib.urlretrieve(server + '?' + query, output, data = None)
            except IOError:
                grass.fatal(_("Failed while downloading the data"))
            
            if not os.path.exists(output) or os.path.getsize(output) == 0:
                grass.fatal(_("Failed while downloading the data"))
Example #29
def main():
    mon = grass.gisenv().get('MONITOR', None)
    if not mon:
        grass.fatal(_("No graphics device selected. Use d.mon to select graphics device."))

    monCmd = grass.parse_command('d.mon', flags='g').get('cmd', None)
    if not monCmd or not os.path.isfile(monCmd):
        grass.fatal(_("Unable to open file '%s'") % monCmd)

    try:
        fd = open(monCmd, 'r')
        cmdList = fd.readlines()

        grass.run_command('d.erase')

        for cmd in cmdList:
            grass.call(split(cmd))
    except IOError as e:
        grass.fatal(_("Unable to open file '%s' for reading. Details: %s") %
                    (monCmd, e))

    fd.close()

    # restore cmd file
    try:
        fd = open(monCmd, "w")
        fd.writelines(cmdList)
    except IOError as e:
        grass.fatal(_("Unable to open file '%s' for writing. Details: %s") %
                    (monCmd, e))

    return 0
Example #30
def increment_datetime_by_string(mydate, increment, mult = 1):
    """Return a new datetime object incremented with the provided relative dates specified as string.
       Additional a multiplier can be specified to multiply the increment bevor adding to the provided datetime object.

       @mydate A datetime object to incremented
       @increment A string providing increment information:
                  The string may include comma separated values of type seconds, minutes, hours, days, weeks, months and years
                  Example: Increment the datetime 2001-01-01 00:00:00 with "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"
                  will result in the datetime 2003-02-18 12:05:00
        @mult A multiplier, default is 1
    """

    if increment:

        seconds = 0
        minutes = 0
        hours = 0
        days = 0
        weeks = 0
        months = 0
        years = 0

        inclist = []
        # Split the increment string
        incparts = increment.split(",")
        for incpart in incparts:
            inclist.append(incpart.strip().split(" "))

        for inc in inclist:
            if inc[1].find("seconds") >= 0:
                seconds = mult * int(inc[0])
            elif inc[1].find("minutes") >= 0:
                minutes = mult * int(inc[0])
            elif inc[1].find("hours") >= 0:
                hours = mult * int(inc[0])
            elif inc[1].find("days") >= 0:
                days = mult * int(inc[0])
            elif inc[1].find("weeks") >= 0:
                weeks = mult * int(inc[0])
            elif inc[1].find("months") >= 0:
                months = mult * int(inc[0])
            elif inc[1].find("years") >= 0:
                years = mult * int(inc[0])
            else:
                core.fatal("Wrong increment format: " + increment)

        return increment_datetime(mydate, years, months, weeks, days, hours, minutes, seconds)
    
    return mydate
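A minimal usage sketch of the increment format described in the docstring, mirroring the test in Example #19; it assumes increment_datetime_by_string (and its helper increment_datetime) are already in scope, e.g. imported from GRASS's temporal datetime helpers:

from datetime import datetime

start = datetime(2001, 9, 1, 0, 0, 0)
end = increment_datetime_by_string(
    start, "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years")
# end == datetime(2003, 2, 18, 12, 5, 0)

# the same increment string applied with a multiplier of 2
end2 = increment_datetime_by_string(start, "1 days", mult=2)
# end2 == datetime(2001, 9, 3, 0, 0, 0)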
Example #31
def main():
    # get currently selected monitor
    monitor = check_monitor()
    if not monitor:
        fatal(
            _("No graphics device selected. Use d.mon to select graphics device."
              ))

    if flags['e']:
        # remove frames and erase monitor and exit
        erase(monitor)
        return

    if flags['p']:
        # print current frame and exit
        print_frames(monitor, current_only=True)
        return

    if flags['a']:
        # print all frames including their position and exit
        print_frames(monitor, current_only=False, full=True)
        return

    found = find_frame(monitor, options['frame'])
    if not found:
        if not flags['c']:
            fatal(
                _("Frame <%s> doesn't exist, exiting. "
                  "To create a new frame use '-c' flag.") % options['frame'])
        else:
            if not options['at']:
                fatal(_("Required parameter <%s> not set") % "at")
            # create new frame if not exists
            create_frame(monitor, options['frame'], options['at'])
    else:
        if os.getenv('GRASS_OVERWRITE', '0') == '1':
            warning(
                _("Frame <%s> already exists and will be overwritten") %
                options['frame'])
            create_frame(monitor,
                         options['frame'],
                         options['at'],
                         overwrite=True)
        else:
            if options['at']:
                warning(
                    _("Frame <%s> already found. An existing frame can be overwritten by '%s' flag."
                      ) % (options['frame'], "--overwrite"))

    # select the given frame
    select_frame(monitor, options['frame'])
Example #32
def main():
    host = options['host']
    port = options['port']
    database = options['database']
    schema = options['schema']
    user = options['user']
    password = options['password']

    # Test connection
    conn = "dbname=" + database
    if host:
        conn += ",host=" + host
    if port:
        conn += ",port=" + port

    # Unfortunately we cannot test until user/password is set
    if user or password:
        print "Setting login (db.login) ... "
        sys.stdout.flush()
        if grass.run_command('db.login',
                             driver="pg",
                             database=conn,
                             user=user,
                             password=password) != 0:
            grass.fatal("Cannot login")

    # Try to connect
    print "Testing connection ..."
    sys.stdout.flush()
    if grass.run_command('db.select',
                         quiet=True,
                         flags='c',
                         driver="pg",
                         database=conn,
                         sql="select version()") != 0:
        if user or password:
            print "Deleting login (db.login) ..."
            sys.stdout.flush()
            if grass.run_command('db.login',
                                 quiet=True,
                                 driver="pg",
                                 database=conn,
                                 user="",
                                 password="") != 0:
                print "Cannot delete login."
                sys.stdout.flush()
        grass.fatal("Cannot connect to database.")

    if grass.run_command(
            'db.connect', driver="pg", database=conn, schema=schema) != 0:
        grass.fatal("Cannot connect to database.")
Example #33
def main():
    # check if input file exists
    infile = options['input']
    gfile = grass.find_file(infile, element='vector')
    if not gfile['name']:
        grass.fatal(_("Vector map <%s> not found") % infile)
    # create tempfile and write ascii file of input
    temp_in = grass.tempfile()
    try:
        grass.run_command('v.out.ascii',
                          overwrite=True,
                          input=gfile['name'],
                          output=temp_in)
    except CalledModuleError:
        grass.fatal(_("Failed to export vector in a temporary file"))
    # x and y of median point
    medx, medy = point_med(temp_in)
    try_remove(temp_in)
    # prepare the output
    output = "%f|%f" % (medx, medy)
    map_name = options['output']
    overwrite = os.getenv('GRASS_OVERWRITE')
    # if output is not set, write to stdout
    if map_name == '-':
        grass.message(output)
    # else
    else:
        # output file
        goutfile = grass.find_file(name=map_name, element='vector', mapset='.')
        # output tempfile
        temp_out = grass.tempfile()
        file_out = open(temp_out, 'w')
        file_out.write(output)
        file_out.close()
        # output file exists and not overwrite
        if goutfile['file'] and overwrite != '1':
            grass.fatal(_("Vector map <%s> already exists") % map_name)
        # output file exists and overwrite
        elif goutfile['file'] and overwrite == '1':
            grass.warning(
                _("Vector map <%s> already exists and will be overwritten") %
                map_name)
            grass.run_command('v.in.ascii',
                              overwrite=True,
                              input=temp_out,
                              output=map_name)
        # output file not exists
        else:
            grass.run_command('v.in.ascii', input=temp_out, output=map_name)
        try_remove(temp_out)
Example #34
def main():
    mon = grass.gisenv().get("MONITOR", None)
    if not mon:
        grass.fatal(
            _("No graphics device selected. Use d.mon to select graphics device."
              ))

    monCmd = grass.parse_command("d.mon", flags="g").get("cmd", None)
    if not monCmd or not os.path.isfile(monCmd):
        grass.fatal(_("Unable to open file '%s'") % monCmd)

    try:
        fd = open(monCmd, "r")
        cmdList = fd.readlines()

        grass.run_command("d.erase")

        for cmd in cmdList:
            if cmd.startswith("#"):
                continue
            grass.call(split(cmd))
    except IOError as e:
        grass.fatal(
            _("Unable to open file '%s' for reading. Details: %s") %
            (monCmd, e))

    fd.close()

    # restore cmd file
    try:
        fd = open(monCmd, "w")
        fd.writelines(cmdList)
    except IOError as e:
        grass.fatal(
            _("Unable to open file '%s' for writing. Details: %s") %
            (monCmd, e))

    return 0
Example #35
def start_browser(entry):
    if (
        browser
        and browser not in ("xdg-open", "start")
        and not grass.find_program(browser)
    ):
        grass.fatal(_("Browser '%s' not found") % browser)

    if flags["o"]:
        major, minor, patch = grass.version()["version"].split(".")
        url_path = "https://grass.osgeo.org/grass%s%s/manuals/%s.html" % (
            major,
            minor,
            entry,
        )
        if urlopen(url_path).getcode() != 200:
            url_path = "https://grass.osgeo.org/grass%s%s/manuals/addons/%s.html" % (
                major,
                minor,
                entry,
            )
    else:
        path = os.path.join(gisbase, "docs", "html", entry + ".html")
        if not os.path.exists(path) and os.getenv("GRASS_ADDON_BASE"):
            path = os.path.join(
                os.getenv("GRASS_ADDON_BASE"), "docs", "html", entry + ".html"
            )

        if not os.path.exists(path):
            grass.fatal(_("No HTML manual page entry for '%s'") % entry)

        url_path = "file://" + path

    if browser and browser not in ("xdg-open", "start"):
        webbrowser.register(browser_name, None)

    grass.verbose(
        _("Starting browser '%(browser)s' for manual" " entry '%(entry)s'...")
        % dict(browser=browser_name, entry=entry)
    )

    try:
        webbrowser.open(url_path)
    except:
        grass.fatal(
            _("Error starting browser '%(browser)s' for HTML file" " '%(path)s'")
            % dict(browser=browser, path=path)
        )
Example #36
    def __init__(self):
        self.ppath = os.path.dirname(os.path.abspath(__file__))

        self.confDirPath = os.path.join(
            os.getenv("GRASS_ADDON_BASE"),
            "etc",
            "wx.metadata",
            "config",
        )
        path = os.path.join("wx.metadata", "config")
        self.connResources = get_lib_path(
            modname=path,
            libname="connections_resources.xml",
        )
        if self.connResources is None:
            grass.fatal("Fatal error: library < {} > not found".format(path), )
        else:
            self.connResources = os.path.join(
                self.connResources,
                "connections_resources.xml",
            )

        self.configureLibPath = get_lib_path(
            modname=path,
            libname="init_md.txt",
        )
        if self.configureLibPath is None:
            grass.fatal("Fatal error: library < {} > not found".format(path), )

        path = os.path.join("wx.metadata", "profiles")
        self.profilesLibPath = get_lib_path(
            modname=path,
            libname="basicProfile.xml",
        )
        if self.profilesLibPath is None:
            grass.fatal("Fatal error: library < %s > not found" % path)

        self.lib_path = os.path.normpath(
            os.path.join(self.configureLibPath, os.pardir), )
Example #37
    def __init__(self):
        self.ppath = os.path.dirname(os.path.abspath(__file__))

        self.confDirPath = os.path.join(
            os.getenv('GRASS_ADDON_BASE'),
            'etc', 'wx.metadata', 'config',
        )
        path = os.path.join('wx.metadata', 'config')
        self.connResources = get_lib_path(
            modname=path, libname='connections_resources.xml',
        )
        if self.connResources is None:
            grass.fatal(
                "Fatal error: library < {} > not found".format(
                    path),
            )
        else:
            self.connResources = os.path.join(
                self.connResources, 'connections_resources.xml',
            )

        self.configureLibPath = get_lib_path(
            modname=path, libname='init_md.txt',
        )
        if self.configureLibPath is None:
            grass.fatal(
                "Fatal error: library < {} > not found".format(path),
            )

        path = os.path.join('wx.metadata', 'profiles')
        self.profilesLibPath = get_lib_path(
            modname=path, libname='basicProfile.xml',
        )
        if self.profilesLibPath is None:
            grass.fatal("Fatal error: library < %s > not found" % path)

        self.lib_path = os.path.normpath(
            os.path.join(self.configureLibPath, os.pardir),
        )
Example #38
def start_browser(entry):
    if browser and \
       browser not in ('xdg-open', 'start') and \
       not grass.find_program(browser):
        grass.fatal(_("Browser '%s' not found") % browser)

    if flags['o']:
        major, minor, patch = grass.version()['version'].split('.')
        url_path = 'http://grass.osgeo.org/grass%s%s/manuals/%s.html' % (
            major, minor, entry)
        if urlopen(url_path).getcode() != 200:
            url_path = 'http://grass.osgeo.org/grass%s%s/manuals/addons/%s.html' % (
                major, minor, entry)
    else:
        path = os.path.join(gisbase, 'docs', 'html', entry + '.html')
        if not os.path.exists(path) and os.getenv('GRASS_ADDON_BASE'):
            path = os.path.join(os.getenv('GRASS_ADDON_BASE'), 'docs', 'html',
                                entry + '.html')

        if not os.path.exists(path):
            grass.fatal(_("No HTML manual page entry for '%s'") % entry)

        url_path = 'file://' + path

    if browser and browser not in ('xdg-open', 'start'):
        webbrowser.register(browser_name, None)

    grass.verbose(
        _("Starting browser '%(browser)s' for manual"
          " entry '%(entry)s'...") % dict(browser=browser_name, entry=entry))

    try:
        webbrowser.open(url_path)
    except:
        grass.fatal(
            _("Error starting browser '%(browser)s' for HTML file"
              " '%(path)s'") % dict(browser=browser, path=path))
Example #39
def main():
    infile = options['input']
    compression_off = flags['c']

    global basedir
    basedir = grass.tempdir()

    # check if vector map exists
    gfile = grass.find_file(infile, element='vector')
    if not gfile['name']:
        grass.fatal(_("Vector map <%s> not found") % infile)

    # check if input vector map is in the native format
    if vector.vector_info(gfile['fullname'])['format'] != 'native':
        grass.fatal(
            _("Unable to pack vector map <%s>. Only native format supported.")
            % gfile['fullname'])

    # strip the mapset name if present
    if '@' in infile:
        infile = infile.split('@')[0]

    # output name
    if options['output']:
        outfile = options['output']
    else:
        outfile = infile + '.pack'

    # check if the output file exists
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(
                _("Pack file <%s> already exists and will be overwritten") %
                outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))

    # prepare for packing
    grass.verbose(_("Packing <%s>...") % (gfile['fullname']))

    # write tar file, optional compression
    if compression_off:
        tar = tarfile.open(name=outfile, mode='w:')
    else:
        tar = tarfile.open(name=outfile, mode='w:gz')
    tar.add(gfile['file'], infile)

    # check if a db connection exists for the vector
    db_vect = vector.vector_db(gfile['fullname'])
    if not db_vect:
        grass.verbose(
            _('There is no database connected with vector map <%s>') %
            gfile['fullname'])
    else:
        # for each layer connection save a table in sqlite database
        sqlitedb = os.path.join(basedir, 'db.sqlite')
        for i, dbconn in db_vect.items():
            grass.run_command('db.copy',
                              from_driver=dbconn['driver'],
                              from_database=dbconn['database'],
                              from_table=dbconn['table'],
                              to_driver='sqlite',
                              to_database=sqlitedb,
                              to_table=dbconn['table'])
        tar.add(sqlitedb, 'db.sqlite')

    # add to the tar file the PROJ files to check when unpack file
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS', 'EPSG']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            tar.add(path, 'PROJ_' + support)
    tar.close()

    grass.message(
        _("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
Example #40
def main():
    layers = options['map'].split(',')

    if len(layers) < 2:
        grass.error(_("At least 2 maps are required"))

    tmpfile = grass.tempfile()

    for map in layers:
        if not grass.find_file(map, element='cell')['file']:
            grass.fatal(_("Raster map <%s> not found") % map)

    grass.write_command('d.text',
                        color='black',
                        size=4,
                        line=1,
                        stdin="CORRELATION")

    os.environ['GRASS_PNG_READ'] = 'TRUE'

    colors = "red black blue green gray violet".split()
    line = 2
    iloop = 0
    jloop = 0
    for iloop, i in enumerate(layers):
        for jloop, j in enumerate(layers):
            if i != j and iloop <= jloop:
                color = colors[0]
                colors = colors[1:]
                colors.append(color)
                grass.write_command('d.text',
                                    color=color,
                                    size=4,
                                    line=line,
                                    stdin="%s %s" % (i, j))
                line += 1

                ofile = file(tmpfile, 'w')
                grass.run_command('r.stats',
                                  flags='cnA',
                                  input=(i, j),
                                  stdout=ofile)
                ofile.close()

                ifile = file(tmpfile, 'r')
                first = True
                for l in ifile:
                    f = l.rstrip('\r\n').split(' ')
                    x = float(f[0])
                    y = float(f[1])
                    if first:
                        minx = maxx = x
                        miny = maxy = y
                        first = False
                    if minx > x: minx = x
                    if maxx < x: maxx = x
                    if miny > y: miny = y
                    if maxy < y: maxy = y
                ifile.close()

                kx = 100.0 / (maxx - minx + 1)
                ky = 100.0 / (maxy - miny + 1)

                p = grass.feed_command('d.graph', color=color)
                ofile = p.stdin

                ifile = file(tmpfile, 'r')
                for l in ifile:
                    f = l.rstrip('\r\n').split(' ')
                    x = float(f[0])
                    y = float(f[1])
                    ofile.write("icon + 0.1 %f %f\n" % ((x - minx + 1) * kx,
                                                        (y - miny + 1) * ky))
                ifile.close()

                ofile.close()
                p.wait()

    try_remove(tmpfile)
Example #41
def main():
    infile = options['input']

    # create temporary directory
    global tmp_dir
    tmp_dir = grass.tempdir()
    grass.debug('tmp_dir = %s' % tmp_dir)

    # check if the input file exists
    if not os.path.exists(infile):
        grass.fatal(_("File <%s> not found") % infile)

    # copy the files to tmp dir
    input_base = os.path.basename(infile)
    shutil.copyfile(infile, os.path.join(tmp_dir, input_base))
    os.chdir(tmp_dir)
    tar = tarfile.TarFile.open(name=input_base, mode='r')
    try:
        data_name = tar.getnames()[0]
    except:
        grass.fatal(_("Pack file unreadable"))

    if flags['p']:
        # print proj info and exit
        try:
            for fname in ['PROJ_INFO', 'PROJ_UNITS']:
                f = tar.extractfile(fname)
                sys.stdout.write(f.read())
        except KeyError:
            grass.fatal(
                _("Pack file unreadable: file '{}' missing".format(fname)))
        tar.close()

        return 0

    # set the output name
    if options['output']:
        map_name = options['output']
    else:
        map_name = data_name

    # grass env
    gisenv = grass.gisenv()
    mset_dir = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            gisenv['MAPSET'])

    new_dir = os.path.join(mset_dir, 'vector', map_name)

    gfile = grass.find_file(name=map_name, element='vector', mapset='.')
    overwrite = os.getenv('GRASS_OVERWRITE')
    if gfile['file'] and overwrite != '1':
        grass.fatal(_("Vector map <%s> already exists") % map_name)
    elif overwrite == '1' and gfile['file']:
        grass.warning(
            _("Vector map <%s> already exists and will be overwritten") %
            map_name)
        grass.run_command('g.remove',
                          flags='f',
                          quiet=True,
                          type='vector',
                          name=map_name)
        shutil.rmtree(new_dir, True)

    # extract data
    tar.extractall()
    tar.close()
    if os.path.exists(os.path.join(data_name, 'coor')):
        pass
    elif os.path.exists(os.path.join(data_name, 'cell')):
        grass.fatal(
            _("This GRASS GIS pack file contains raster data. Use "
              "r.unpack to unpack <%s>" % map_name))
    else:
        grass.fatal(_("Pack file unreadable"))

    # check projection compatibility in a rather crappy way
    loc_proj = os.path.join(mset_dir, '..', 'PERMANENT', 'PROJ_INFO')
    loc_proj_units = os.path.join(mset_dir, '..', 'PERMANENT', 'PROJ_UNITS')

    skip_projection_check = False
    if not os.path.exists(os.path.join(tmp_dir, 'PROJ_INFO')):
        if os.path.exists(loc_proj):
            grass.fatal(
                _("PROJ_INFO file is missing, unpack vector map in XY (unprojected) location."
                  ))
        skip_projection_check = True  # XY location

    if not skip_projection_check:
        diff_result_1 = diff_result_2 = None
        if not grass.compare_key_value_text_files(filename_a=os.path.join(
                tmp_dir, 'PROJ_INFO'),
                                                  filename_b=loc_proj,
                                                  proj=True):
            diff_result_1 = diff_files(os.path.join(tmp_dir, 'PROJ_INFO'),
                                       loc_proj)

        if not grass.compare_key_value_text_files(filename_a=os.path.join(
                tmp_dir, 'PROJ_UNITS'),
                                                  filename_b=loc_proj_units,
                                                  units=True):
            diff_result_2 = diff_files(os.path.join(tmp_dir, 'PROJ_UNITS'),
                                       loc_proj_units)

        if diff_result_1 or diff_result_2:
            if flags['o']:
                grass.warning(
                    _("Projection information does not match. Proceeding..."))
            else:
                if diff_result_1:
                    grass.warning(
                        _("Difference between PROJ_INFO file of packed map "
                          "and of current location:\n{diff}").format(
                              diff=''.join(diff_result_1)))
                if diff_result_2:
                    grass.warning(
                        _("Difference between PROJ_UNITS file of packed map "
                          "and of current location:\n{diff}").format(
                              diff=''.join(diff_result_2)))
                grass.fatal(
                    _("Projection of dataset does not appear to match current location."
                      " In case of no significant differences in the projection definitions,"
                      " use the -o flag to ignore them and use"
                      " current location definition."))

    # new db
    fromdb = os.path.join(tmp_dir, 'db.sqlite')
    # copy file
    shutil.copytree(data_name, new_dir)
    # exist fromdb
    if os.path.exists(fromdb):
        # the db connection in the output mapset
        dbconn = grassdb.db_connection(force=True)
        todb = dbconn['database']
        # return all tables
        list_fromtable = grass.read_command('db.tables',
                                            driver='sqlite',
                                            database=fromdb).splitlines()

        # read the list of old connections to extract layer number and key
        dbln = open(os.path.join(new_dir, 'dbln'), 'r')
        dbnlist = dbln.readlines()
        dbln.close()
        # check if dbf or sqlite directory exists
        if dbconn['driver'] == 'dbf' and not os.path.exists(
                os.path.join(mset_dir, 'dbf')):
            os.mkdir(os.path.join(mset_dir, 'dbf'))
        elif dbconn['driver'] == 'sqlite' and not os.path.exists(
                os.path.join(mset_dir, 'sqlite')):
            os.mkdir(os.path.join(mset_dir, 'sqlite'))
        # for each old connection
        for t in dbnlist:
            # split each connection line to find the layer number and key
            if len(t.split('|')) != 1:
                values = t.split('|')
            else:
                values = t.split(' ')

            from_table = values[1]
            layer = values[0].split('/')[0]
            # we need to take care of the table name in case of several layers
            if options["output"]:
                if len(dbnlist) > 1:
                    to_table = "%s_%s" % (map_name, layer)
                else:
                    to_table = map_name
            else:
                to_table = from_table

            grass.verbose(
                _("Coping table <%s> as table <%s>") % (from_table, to_table))

            # copy the table in the default database
            try:
                grass.run_command('db.copy',
                                  to_driver=dbconn['driver'],
                                  to_database=todb,
                                  to_table=to_table,
                                  from_driver='sqlite',
                                  from_database=fromdb,
                                  from_table=from_table)
            except CalledModuleError:
                grass.fatal(
                    _("Unable to copy table <%s> as table <%s>") %
                    (from_table, to_table))

            grass.verbose(
                _("Connect table <%s> to vector map <%s> at layer <%s>") %
                (to_table, map_name, layer))

            # and connect the new tables with the right layer
            try:
                grass.run_command('v.db.connect',
                                  flags='o',
                                  quiet=True,
                                  driver=dbconn['driver'],
                                  database=todb,
                                  map=map_name,
                                  key=values[2],
                                  layer=layer,
                                  table=to_table)
            except CalledModuleError:
                grass.fatal(
                    _("Unable to connect table <%s> to vector map <%s>") %
                    (to_table, map_name))

    grass.message(_("Vector map <%s> successfully unpacked") % map_name)
Example #42
    def saveXML(self,
                path=None,
                xml_out_name=None,
                wxparent=None,
                overwrite=False):
        '''Save init. record of OWSLib objects to ISO XML file'''

        # if  output file name is None, use map name and add suffix
        if xml_out_name is None:
            xml_out_name = self.type + '_' + str(
                self.map).partition('@')[0]  # + self.schema_type
        if not xml_out_name.lower().endswith('.xml'):
            xml_out_name += '.xml'

        if not path:
            path = os.path.join(mdutil.pathToMapset(), 'metadata')
            if not os.path.exists(path):
                os.makedirs(path)
        path = os.path.join(path, xml_out_name)

        # generate xml using jinja profiles
        env = Environment(loader=FileSystemLoader(self.dirpath))
        env.globals.update(zip=zip)
        profile = env.get_template(self.profilePath)
        iso_xml = profile.render(md=self.md)

        # write xml to flat file
        if wxparent is not None:
            if os.path.isfile(path):
                if mdutil.yesNo(
                        wxparent,
                        'Metadata file exists. Do you want to overwrite metadata file: %s?'
                        % path, 'Overwrite dialog'):
                    try:
                        xml_file = open(path, "w")
                        xml_file.write(iso_xml)
                        xml_file.close()
                        Module('g.message',
                               message='metadata exported: \n\
                                                     %s' % (str(path)))
                    except IOError as e:
                        print "I/O error({0}): {1}".format(e.errno, e.strerror)
                        grass.fatal('ERROR: cannot write xml to file')
                return path
            else:
                try:
                    xml_file = open(path, "w")
                    xml_file.write(iso_xml)
                    xml_file.close()
                    Module('g.message',
                           message='metadata exported: \n\
                                                     %s' % (str(path)))
                except IOError as e:
                    print "I/O error({0}): {1}".format(e.errno, e.strerror)
                    grass.fatal('ERROR: cannot write xml to file')
                    # sys.exit()
                return path
        else:
            if os.path.isfile(path):
                Module('g.message', message='Metadata file exists: %s' % path)
                if overwrite:
                    try:
                        xml_file = open(path, "w")
                        xml_file.write(iso_xml)
                        xml_file.close()
                        Module('g.message',
                               message='Metadata file has been overwritten')
                        return path
                    except IOError as e:
                        print "I/O error({0}): {1}".format(e.errno, e.strerror)
                        grass.fatal('error: cannot write xml to file')
                else:
                    Module('g.message',
                           message='Use the -overwrite flag to overwrite the metadata file')
                    return False
            else:
                try:
                    xml_file = open(path, "w")
                    xml_file.write(iso_xml)
                    xml_file.close()
                    Module('g.message',
                           message='Metadata file has been exported')
                    return path

                except IOError as e:
                    print "I/O error({0}): {1}".format(e.errno, e.strerror)
                    grass.fatal('error: cannot write xml to file')
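# A minimal, standalone sketch of the render-template-then-write-file step that
# saveXML() above repeats in every branch. This is not part of the original
# class; 'template_dir', 'template_name', 'md' and 'out_path' are hypothetical
# arguments introduced only for illustration.
import os
from jinja2 import Environment, FileSystemLoader

def render_iso_xml(template_dir, template_name, md, out_path):
    """Render a Jinja2 metadata profile with `md` and write it to out_path."""
    env = Environment(loader=FileSystemLoader(template_dir))
    env.globals.update(zip=zip)  # make zip() callable inside the template
    iso_xml = env.get_template(template_name).render(md=md)
    os.makedirs(os.path.dirname(out_path) or '.', exist_ok=True)
    with open(out_path, 'w') as xml_file:
        xml_file.write(iso_xml)
    return out_path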
Example #43
0
    def isMapExist(self):
        '''Check if the map is in the current mapset'''
        self.mapset = grass.find_file(self.map, self.type)['mapset']
        if not self.mapset:
            grass.fatal(
                _("Map <%s> does not exist in current mapset") % self.map)
Example #44
0
def main():
    infile = options['input']
    compression_off = flags['c']
    mapset = None
    if '@' in infile:
        infile, mapset = infile.split('@')

    if options['output']:
        outfile_path, outfile_base = os.path.split(
            os.path.abspath(options['output']))
    else:
        outfile_path, outfile_base = os.path.split(
            os.path.abspath(infile + ".pack"))

    outfile = os.path.join(outfile_path, outfile_base)

    global tmp
    tmp = grass.tempdir()
    tmp_dir = os.path.join(tmp, infile)
    os.mkdir(tmp_dir)
    grass.debug('tmp_dir = %s' % tmp_dir)

    gfile = grass.find_file(name=infile, element='cell', mapset=mapset)
    if not gfile['name']:
        grass.fatal(_("Raster map <%s> not found") % infile)

    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(
                _("Pack file <%s> already exists and will be overwritten") %
                outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <output>: <%s> exists.") % outfile)

    grass.message(_("Packing <%s> to <%s>...") % (gfile['fullname'], outfile))
    basedir = os.path.sep.join(
        os.path.normpath(gfile['file']).split(os.path.sep)[:-2])
    olddir = os.getcwd()

    # copy elements
    info = grass.parse_command('r.info', flags='e', map=infile)
    vrt_files = {}
    if info['maptype'] == 'virtual':
        map_file = grass.find_file(
            name=infile,
            element='cell_misc',
        )
        if map_file['file']:
            vrt = os.path.join(map_file['file'], 'vrt')
            if os.path.exists(vrt):
                with open(vrt, 'r') as f:
                    for r in f.readlines():
                        map, mapset = r.split('@')
                        map_basedir = os.path.sep.join(
                            os.path.normpath(map_file['file']).split(
                                os.path.sep)[:-2])
                        vrt_files[map] = map_basedir

    for element in [
            'cats',
            'cell',
            'cellhd',
            'cell_misc',
            'colr',
            'fcell',
            'hist',
    ]:
        path = os.path.join(basedir, element, infile)
        if os.path.exists(path):
            grass.debug('copying %s' % path)
            if os.path.isfile(path):
                shutil.copyfile(
                    path,
                    os.path.join(tmp_dir, element),
                )
            else:
                shutil.copytree(
                    path,
                    os.path.join(tmp_dir, element),
                )

        # Copy vrt files
        if vrt_files:
            for f in vrt_files.keys():
                f_tmp_dir = os.path.join(tmp, f)
                if not os.path.exists(f_tmp_dir):
                    os.mkdir(f_tmp_dir)
                path = os.path.join(vrt_files[f], element, f)
                if os.path.exists(path):
                    grass.debug("copying vrt file {}".format(path))
                    if os.path.isfile(path):
                        shutil.copyfile(
                            path,
                            os.path.join(f_tmp_dir, element),
                        )
                    else:
                        shutil.copytree(
                            path,
                            os.path.join(f_tmp_dir, element),
                        )

    if not os.listdir(tmp_dir):
        grass.fatal(_("No raster map components found"))

    # copy projection info
    # (would prefer to use g.proj*, but this way is 5.3 and 5.7 compat)
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS', 'EPSG']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            shutil.copyfile(path, os.path.join(tmp_dir, 'PROJ_' + support))

    # pack it all up
    os.chdir(tmp)
    if compression_off:
        tar = tarfile.TarFile.open(name=outfile_base, mode='w:')
    else:
        tar = tarfile.TarFile.open(name=outfile_base, mode='w:gz')
    tar.add(infile, recursive=True)
    if vrt_files:
        for f in vrt_files.keys():
            tar.add(f, recursive=True)

    tar.close()
    try:
        shutil.move(outfile_base, outfile)
    except shutil.Error as e:
        grass.fatal(e)

    os.chdir(olddir)

    grass.verbose(_("Raster map saved to '%s'" % outfile))
Example #45
0
def test_adjust_datetime_to_granularity():

    # First test
    print("Test 1")
    dt = datetime(2001, 8, 8, 12, 30, 30)
    result = adjust_datetime_to_granularity(dt, "5 seconds")
    correct = datetime(2001, 8, 8, 12, 30, 30)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # Second test
    print("Test 2")
    result = adjust_datetime_to_granularity(dt, "20 minutes")
    correct = datetime(2001, 8, 8, 12, 30, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # Third test
    print("Test 2")
    result = adjust_datetime_to_granularity(dt, "20 minutes")
    correct = datetime(2001, 8, 8, 12, 30, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # 4. test
    print("Test 4")
    result = adjust_datetime_to_granularity(dt, "3 hours")
    correct = datetime(2001, 8, 8, 12, 0, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # 5. test
    print("Test 5")
    result = adjust_datetime_to_granularity(dt, "5 days")
    correct = datetime(2001, 8, 8, 0, 0, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # 6. test
    print("Test 6")
    result = adjust_datetime_to_granularity(dt, "2 weeks")
    correct = datetime(2001, 8, 6, 0, 0, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # 7. test
    print("Test 7")
    result = adjust_datetime_to_granularity(dt, "6 months")
    correct = datetime(2001, 8, 1, 0, 0, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # 8. test
    print("Test 8")
    result = adjust_datetime_to_granularity(dt, "2 years")
    correct = datetime(2001, 1, 1, 0, 0, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # 9. test
    print("Test 9")
    result = adjust_datetime_to_granularity(
        dt, "2 years, 3 months, 5 days, 3 hours, 3 minutes, 2 seconds")
    correct = datetime(2001, 8, 8, 12, 30, 30)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # 10. test
    print("Test 10")
    result = adjust_datetime_to_granularity(dt, "3 months, 5 days, 3 minutes")
    correct = datetime(2001, 8, 8, 12, 30, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))

    # 11. test
    print("Test 11")
    result = adjust_datetime_to_granularity(dt, "3 weeks, 5 days")
    correct = datetime(2001, 8, 8, 0, 0, 0)

    delta = correct - result

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("Granularity adjustment computation is wrong %s" % (delta))
Example #46
0
def test_spatial_relations():
    # Generate the extents

    A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "equivalent":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=70, south=20, east=60, west=10, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "cover":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "cover":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=70, south=30, east=60, west=10, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "cover":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "cover":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = B.spatial_relation_2d(A)
    print(relation)
    if relation != "covered":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = B.spatial_relation(A)
    print(relation)
    if relation != "covered":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "cover":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = B.spatial_relation_2d(A)
    print(relation)
    if relation != "covered":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "cover":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)

    relation = B.spatial_relation(A)
    print(relation)
    if relation != "covered":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "contain":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "cover":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=50)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "cover":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=70, south=30, east=50, west=20, bottom=-40, top=40)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "contain":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = B.spatial_relation(A)
    print(relation)
    if relation != "in":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=90, south=30, east=50, west=20, bottom=-40, top=40)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "overlap":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "overlap":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=40)
    A.print_info()
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "in":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "overlap":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-40, top=60)
    A.print_info()
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "overlap":
        core.fatal("Wrong spatial relation: %s" % (relation))

    B = SpatialExtent(north=90, south=5, east=70, west=5, bottom=-60, top=60)
    A.print_info()
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "in":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=60, south=20, east=60, west=10, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=60, south=40, east=60, west=10, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=80, south=60, east=60, west=10, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=40, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=90, south=30, east=60, west=40, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=70, south=50, east=60, west=40, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=60, south=20, east=60, west=40, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=40, south=20, east=60, west=40, bottom=-50, top=50)
    B.print_info()

    relation = A.spatial_relation_2d(B)
    print(relation)
    if relation != "disjoint":
        core.fatal("Wrong spatial relation: %s" % (relation))

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "disjoint":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=60, south=20, east=60, west=40, bottom=-60, top=60)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=40, west=20, bottom=-50, top=50)
    A.print_info()
    B = SpatialExtent(north=90, south=30, east=60, west=40, bottom=-40, top=40)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
    A.print_info()
    B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
    A.print_info()
    B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=-50, top=0)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
    A.print_info()
    B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=-50, top=0)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
    A.print_info()
    B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=-50, top=0)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
    A.print_info()
    B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=-50, top=0)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
    A.print_info()
    B = SpatialExtent(north=80, south=40, east=60, west=20, bottom=0, top=50)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
    A.print_info()
    B = SpatialExtent(north=80, south=50, east=60, west=30, bottom=0, top=50)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
    A.print_info()
    B = SpatialExtent(north=70, south=50, east=50, west=30, bottom=0, top=50)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
    A.print_info()
    B = SpatialExtent(north=90, south=30, east=70, west=10, bottom=0, top=50)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))

    A = SpatialExtent(north=80, south=40, east=60, west=20, bottom=-50, top=0)
    A.print_info()
    B = SpatialExtent(north=70, south=30, east=50, west=10, bottom=0, top=50)
    B.print_info()

    relation = A.spatial_relation(B)
    print(relation)
    if relation != "meet":
        core.fatal("Wrong spatial relation: %s" % (relation))
Example #47
0
def main():
    shell = flags['g']
    serial = flags['s']
    bands = options['input'].split(',')

    if len(bands) < 4:
        grass.fatal(_("At least four input maps required"))

    output = options['output']
    # calculate the Stddev for TM bands
    grass.message(_("Calculating standard deviations for all bands..."))
    stddev = {}

    if serial:
        for band in bands:
            grass.verbose("band %d" % band)
            s = grass.read_command('r.univar', flags='g', map=band)
            kv = parse_key_val(s)
            stddev[band] = float(kv['stddev'])
    else:
        # run all bands in parallel
        if "WORKERS" in os.environ:
            workers = int(os.environ["WORKERS"])
        else:
            workers = len(bands)
        proc = {}
        pout = {}

        # spawn jobs in the background
        n = 0
        for band in bands:
            proc[band] = grass.pipe_command('r.univar', flags='g', map=band)
            if n % workers == 0:
                # wait for the ones launched so far to finish
                for bandp in bands[:n]:
                    if not proc[bandp].stdout.closed:
                        pout[bandp] = proc[bandp].communicate()[0]
                    proc[bandp].wait()
            n = n + 1

        # wait for jobs to finish, collect the output
        for band in bands:
            if not proc[band].stdout.closed:
                pout[band] = proc[band].communicate()[0]
            proc[band].wait()

        # parse the results
        for band in bands:
            kv = parse_key_val(pout[band])
            stddev[band] = float(kv['stddev'])

    grass.message(_("Calculating Correlation Matrix..."))
    correlation = {}
    s = grass.read_command('r.covar',
                           flags='r',
                           map=bands,
                           quiet=True)

    # We need to skip the first line, since r.covar prints the number of values
    lines = s.splitlines()
    for i, row in zip(bands, lines[1:]):
        for j, cell in zip(bands, row.split(' ')):
            correlation[i, j] = float(cell)

    # Calculate all combinations
    grass.message(_("Calculating OIF for all band combinations..."))

    oif = []
    for p in perms(bands):
        oif.append((oifcalc(stddev, correlation, *p), p))
    oif.sort(reverse=True)

    grass.verbose(
        _("The Optimum Index Factor analysis result "
          "(best combination shown first):"))

    if shell:
        fmt = "%s,%s,%s:%.4f\n"
    else:
        fmt = "%s, %s, %s:  %.4f\n"

    if not output or output == '-':
        for v, p in oif:
            sys.stdout.write(fmt % (p + (v, )))
    else:
        outf = open(output, 'w')
        for v, p in oif:
            outf.write(fmt % (p + (v, )))
        outf.close()
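# oifcalc() and perms() are not shown above. A minimal sketch under the usual
# definition of the Optimum Index Factor (sum of the three band standard
# deviations divided by the sum of the absolute pairwise correlations); this
# is an assumption about the helpers, not code from the module above.
from itertools import combinations

def perms(bands):
    """All unordered three-band combinations."""
    return combinations(bands, 3)

def oifcalc(stddev, correlation, b1, b2, b3):
    numer = stddev[b1] + stddev[b2] + stddev[b3]
    denom = (abs(correlation[b1, b2]) +
             abs(correlation[b1, b3]) +
             abs(correlation[b2, b3]))
    return numer / denom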
Example #48
0
def test_increment_datetime_by_string():

    # First test
    print("# Test 1")
    dt = datetime(2001, 9, 1, 0, 0, 0)
    string = "60 seconds, 4 minutes, 12 hours, 10 days, 1 weeks, 5 months, 1 years"

    dt1 = datetime(2003, 2, 18, 12, 5, 0)
    dt2 = increment_datetime_by_string(dt, string)

    print(dt)
    print(dt2)

    delta = dt1 - dt2

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("increment computation is wrong %s" % (delta))

    # Second test
    print("# Test 2")
    dt = datetime(2001, 11, 1, 0, 0, 0)
    string = "1 months"

    dt1 = datetime(2001, 12, 1)
    dt2 = increment_datetime_by_string(dt, string)

    print(dt)
    print(dt2)

    delta = dt1 - dt2

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("increment computation is wrong %s" % (delta))

    # Third test
    print("# Test 3")
    dt = datetime(2001, 11, 1, 0, 0, 0)
    string = "13 months"

    dt1 = datetime(2002, 12, 1)
    dt2 = increment_datetime_by_string(dt, string)

    print(dt)
    print(dt2)

    delta = dt1 - dt2

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("increment computation is wrong %s" % (delta))

    # 4. test
    print("# Test 4")
    dt = datetime(2001, 1, 1, 0, 0, 0)
    string = "72 months"

    dt1 = datetime(2007, 1, 1)
    dt2 = increment_datetime_by_string(dt, string)

    print(dt)
    print(dt2)

    delta = dt1 - dt2

    if delta.days != 0 or delta.seconds != 0:
        core.fatal("increment computation is wrong %s" % (delta))
Example #49
0
def test_compute_absolute_time_granularity():

    # First we test intervals
    print("Test 1")
    maps = []
    a = datetime(2001, 1, 1)
    increment = "1 year"
    for i in range(10):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 2")
    maps = []
    a = datetime(2001, 1, 1)
    increment = "3 years"
    for i in range(10):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 3")
    maps = []
    a = datetime(2001, 5, 1)
    increment = "1 month"
    for i in range(20):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 4")
    maps = []
    a = datetime(2001, 1, 1)
    increment = "3 months"
    for i in range(20):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 3")
    maps = []
    a = datetime(2001, 1, 1)
    increment = "1 day"
    for i in range(6):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 4")
    maps = []
    a = datetime(2001, 1, 14)
    increment = "14 days"
    for i in range(6):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 5")
    maps = []
    a = datetime(2001, 3, 1)
    increment = "1 month, 4 days"
    for i in range(20):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    increment = "1 day"
    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 6")
    maps = []
    a = datetime(2001, 2, 11)
    increment = "1 days, 1 hours"
    for i in range(20):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    increment = "25 hours"
    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 7")
    maps = []
    a = datetime(2001, 6, 12)
    increment = "6 hours"
    for i in range(20):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 8")
    maps = []
    a = datetime(2001, 1, 1)
    increment = "20 minutes"
    for i in range(20):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 9")
    maps = []
    a = datetime(2001, 1, 1)
    increment = "5 hours, 25 minutes"
    for i in range(20):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    increment = "325 minutes"
    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 10")
    maps = []
    a = datetime(2001, 1, 1)
    increment = "5 minutes, 30 seconds"
    for i in range(20):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    increment = "330 seconds"
    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 11")
    maps = []
    a = datetime(2001, 12, 31)
    increment = "60 minutes, 30 seconds"
    for i in range(24):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    increment = "3630 seconds"
    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 12")
    maps = []
    a = datetime(2001, 12, 31, 12, 30, 30)
    increment = "3600 seconds"
    for i in range(24):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        print(start)
        print(end)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    # Test absolute time points

    print("Test 13")
    maps = []
    a = datetime(2001, 12, 31, 12, 30, 30)
    increment = "3600 seconds"
    for i in range(24):
        start = increment_datetime_by_string(a, increment, i)
        end = None
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 14")
    maps = []
    a = datetime(2001, 12, 31, 0, 0, 0)
    increment = "20 days"
    for i in range(24):
        start = increment_datetime_by_string(a, increment, i)
        end = None
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 15")
    maps = []
    a = datetime(2001, 12, 1, 0, 0, 0)
    increment = "5 months"
    for i in range(24):
        start = increment_datetime_by_string(a, increment, i)
        end = None
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    # Test absolute time interval and points

    print("Test 16")
    maps = []
    a = datetime(2001, 12, 31, 12, 30, 30)
    increment = "3600 seconds"

    for i in range(24):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    a = datetime(2002, 2, 1, 12, 30, 30)
    for i in range(24):
        start = increment_datetime_by_string(a, increment, i)
        end = None
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))

    print("Test 17")
    maps = []
    a = datetime(2001, 1, 1)
    increment = "2 days"

    for i in range(8):
        start = increment_datetime_by_string(a, increment, i)
        end = increment_datetime_by_string(a, increment, i + 1)
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    a = datetime(2001, 2, 2)
    for i in range(8):
        start = increment_datetime_by_string(a, increment, i)
        end = None
        map = RasterDataset(None)
        map.set_absolute_time(start, end)
        maps.append(map)

    gran = compute_absolute_time_granularity(maps)
    if increment != gran:
        core.fatal("Wrong granularity reference %s != gran %s" %
                   (increment, gran))
Example #50
0
def test_compute_datetime_delta():

    print("Test 1")
    start = datetime(2001, 1, 1, 0, 0, 0)
    end = datetime(2001, 1, 1, 0, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["second"]
    correct = 0

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 2")
    start = datetime(2001, 1, 1, 0, 0, 14)
    end = datetime(2001, 1, 1, 0, 0, 44)

    comp = compute_datetime_delta(start, end)

    result = comp["second"]
    correct = 30

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 3")
    start = datetime(2001, 1, 1, 0, 0, 44)
    end = datetime(2001, 1, 1, 0, 1, 14)

    comp = compute_datetime_delta(start, end)

    result = comp["second"]
    correct = 30

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 4")
    start = datetime(2001, 1, 1, 0, 0, 30)
    end = datetime(2001, 1, 1, 0, 5, 30)

    comp = compute_datetime_delta(start, end)

    result = comp["second"]
    correct = 300

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 5")
    start = datetime(2001, 1, 1, 0, 0, 0)
    end = datetime(2001, 1, 1, 0, 1, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["minute"]
    correct = 1

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 6")
    start = datetime(2011, 10, 31, 0, 45, 0)
    end = datetime(2011, 10, 31, 1, 45, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["minute"]
    correct = 60

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 7")
    start = datetime(2011, 10, 31, 0, 45, 0)
    end = datetime(2011, 10, 31, 1, 15, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["minute"]
    correct = 30

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 8")
    start = datetime(2011, 10, 31, 0, 45, 0)
    end = datetime(2011, 10, 31, 12, 15, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["minute"]
    correct = 690

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 9")
    start = datetime(2011, 10, 31, 0, 0, 0)
    end = datetime(2011, 10, 31, 1, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["hour"]
    correct = 1

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 10")
    start = datetime(2011, 10, 31, 0, 0, 0)
    end = datetime(2011, 11, 1, 1, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["hour"]
    correct = 25

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 11")
    start = datetime(2011, 10, 31, 12, 0, 0)
    end = datetime(2011, 11, 1, 6, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["hour"]
    correct = 18

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 12")
    start = datetime(2011, 11, 1, 0, 0, 0)
    end = datetime(2011, 12, 1, 1, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["hour"]
    correct = 30 * 24 + 1

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 13")
    start = datetime(2011, 11, 1, 0, 0, 0)
    end = datetime(2011, 11, 5, 0, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["day"]
    correct = 4

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 14")
    start = datetime(2011, 10, 6, 0, 0, 0)
    end = datetime(2011, 11, 5, 0, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["day"]
    correct = 30

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 15")
    start = datetime(2011, 12, 2, 0, 0, 0)
    end = datetime(2012, 1, 1, 0, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["day"]
    correct = 30

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 16")
    start = datetime(2011, 1, 1, 0, 0, 0)
    end = datetime(2011, 2, 1, 0, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["month"]
    correct = 1

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 17")
    start = datetime(2011, 12, 1, 0, 0, 0)
    end = datetime(2012, 1, 1, 0, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["month"]
    correct = 1

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 18")
    start = datetime(2011, 12, 1, 0, 0, 0)
    end = datetime(2012, 6, 1, 0, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["month"]
    correct = 6

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 19")
    start = datetime(2011, 6, 1, 0, 0, 0)
    end = datetime(2021, 6, 1, 0, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["year"]
    correct = 10

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 20")
    start = datetime(2011, 6, 1, 0, 0, 0)
    end = datetime(2012, 6, 1, 12, 0, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["hour"]
    d = end - start
    correct = 12 + d.days * 24

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 21")
    start = datetime(2011, 6, 1, 0, 0, 0)
    end = datetime(2012, 6, 1, 12, 30, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["minute"]
    d = end - start
    correct = d.days * 24 * 60 + 12 * 60 + 30

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 22")
    start = datetime(2011, 6, 1, 0, 0, 0)
    end = datetime(2012, 6, 1, 12, 0, 5)

    comp = compute_datetime_delta(start, end)

    result = comp["second"]
    d = end - start
    correct = 5 + 60 * 60 * 12 + d.days * 24 * 60 * 60

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 23")
    start = datetime(2011, 6, 1, 0, 0, 0)
    end = datetime(2012, 6, 1, 0, 30, 0)

    comp = compute_datetime_delta(start, end)

    result = comp["minute"]
    d = end - start
    correct = 30 + d.days * 24 * 60

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))

    print("Test 24")
    start = datetime(2011, 6, 1, 0, 0, 0)
    end = datetime(2012, 6, 1, 0, 0, 5)

    comp = compute_datetime_delta(start, end)

    result = comp["second"]
    d = end - start
    correct = 5 + d.days * 24 * 60 * 60

    delta = correct - result

    if delta != 0:
        core.fatal("Compute datetime delta is wrong %s" % (delta))
Example #51
0
def test_map_list_sorting():

    map_list = []

    _map = RasterDataset(ident="1@a")
    _map.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 3, 1))
    map_list.append(copy.copy(_map))
    _map = RasterDataset(ident="2@a")
    _map.set_absolute_time(datetime(2001, 1, 1), datetime(2001, 2, 1))
    map_list.append(copy.copy(_map))
    _map = RasterDataset(ident="3@a")
    _map.set_absolute_time(datetime(2001, 3, 1), datetime(2001, 4, 1))
    map_list.append(copy.copy(_map))

    print("Original")
    for _map in map_list:
        print(_map.get_temporal_extent_as_tuple()[0],
              _map.get_temporal_extent_as_tuple()[1])
    print("Sorted by start time")
    new_list = sorted(map_list, key=AbstractDatasetComparisonKeyStartTime)
    for _map in new_list:
        print(_map.get_temporal_extent_as_tuple()[0],
              _map.get_temporal_extent_as_tuple()[1])

    if new_list[0] != map_list[1]:
        core.fatal("Sorting by start time failed")
    if new_list[1] != map_list[0]:
        core.fatal("Sorting by start time failed")
    if new_list[2] != map_list[2]:
        core.fatal("Sorting by start time failed")

    print("Sorted by end time")
    new_list = sorted(map_list, key=AbstractDatasetComparisonKeyEndTime)
    for _map in new_list:
        print(_map.get_temporal_extent_as_tuple()[0],
              _map.get_temporal_extent_as_tuple()[1])

    if new_list[0] != map_list[1]:
        core.fatal("Sorting by end time failed")
    if new_list[1] != map_list[0]:
        core.fatal("Sorting by end time failed")
    if new_list[2] != map_list[2]:
        core.fatal("Sorting by end time failed")
Example #52
0
def test_temporal_topology_builder():
    map_listA = []

    _map = RasterDataset(ident="1@a")
    _map.set_absolute_time(datetime(2001, 1, 1), datetime(2001, 2, 1))
    map_listA.append(copy.copy(_map))
    _map = RasterDataset(ident="2@a")
    _map.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 3, 1))
    map_listA.append(copy.copy(_map))
    _map = RasterDataset(ident="3@a")
    _map.set_absolute_time(datetime(2001, 3, 1), datetime(2001, 4, 1))
    map_listA.append(copy.copy(_map))
    _map = RasterDataset(ident="4@a")
    _map.set_absolute_time(datetime(2001, 4, 1), datetime(2001, 5, 1))
    map_listA.append(copy.copy(_map))
    _map = RasterDataset(ident="5@a")
    _map.set_absolute_time(datetime(2001, 5, 1), datetime(2001, 6, 1))
    map_listA.append(copy.copy(_map))

    tb = SpatioTemporalTopologyBuilder()
    tb.build(map_listA)

    count = 0
    for _map in tb:
        print("[%s]" % (_map.get_name()))
        _map.print_topology_info()
        if _map.get_id() != map_listA[count].get_id():
            core.fatal("Error building temporal topology <%s> != <%s>" %
                       (_map.get_id(), map_listA[count].get_id()))
        count += 1

    map_listB = []

    _map = RasterDataset(ident="1@b")
    _map.set_absolute_time(datetime(2001, 1, 14), datetime(2001, 3, 14))
    map_listB.append(copy.copy(_map))
    _map = RasterDataset(ident="2@b")
    _map.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 4, 1))
    map_listB.append(copy.copy(_map))
    _map = RasterDataset(ident="3@b")
    _map.set_absolute_time(datetime(2001, 2, 14), datetime(2001, 4, 30))
    map_listB.append(copy.copy(_map))
    _map = RasterDataset(ident="4@b")
    _map.set_absolute_time(datetime(2001, 4, 2), datetime(2001, 4, 30))
    map_listB.append(copy.copy(_map))
    _map = RasterDataset(ident="5@b")
    _map.set_absolute_time(datetime(2001, 5, 1), datetime(2001, 5, 14))
    map_listB.append(copy.copy(_map))

    tb = SpatioTemporalTopologyBuilder()
    tb.build(map_listB)

    # Probing some relations
    if map_listB[0].get_overlapped()[0] != map_listB[1]:
        core.fatal("Error building temporal topology")
    if map_listB[0].get_overlapped()[1] != map_listB[2]:
        core.fatal("Error building temporal topology")
    if map_listB[2].get_contains()[0] != map_listB[3]:
        core.fatal("Error building temporal topology")
    if map_listB[3].get_during()[0] != map_listB[2]:
        core.fatal("Error building temporal topology")

    count = 0
    for _map in tb:
        print("[%s]" % (_map.get_map_id()))
        _map.print_topology_shell_info()
        if _map.get_id() != map_listB[count].get_id():
            core.fatal("Error building temporal topology <%s> != <%s>" %
                       (_map.get_id(), map_listB[count].get_id()))
        count += 1

    tb = SpatioTemporalTopologyBuilder()
    tb.build(map_listA, map_listB)

    count = 0
    for _map in tb:
        print("[%s]" % (_map.get_map_id()))
        _map.print_topology_shell_info()
        if _map.get_id() != map_listA[count].get_id():
            core.fatal("Error building temporal topology <%s> != <%s>" %
                       (_map.get_id(), map_listA[count].get_id()))
        count += 1

    count = 0
    for _map in map_listB:
        print("[%s]" % (_map.get_map_id()))
        _map.print_topology_shell_info()

    # Probing some relations
    if map_listA[3].get_follows()[0] != map_listB[1]:
        core.fatal("Error building temporal topology")
    if map_listA[3].get_precedes()[0] != map_listB[4]:
        core.fatal("Error building temporal topology")
    if map_listA[3].get_overlaps()[0] != map_listB[2]:
        core.fatal("Error building temporal topology")
    if map_listA[3].get_contains()[0] != map_listB[3]:
        core.fatal("Error building temporal topology")

    if map_listA[2].get_during()[0] != map_listB[1]:
        core.fatal("Error building temporal topology")
    if map_listA[2].get_during()[1] != map_listB[2]:
        core.fatal("Error building temporal topology")
Example #53
0
def main():
    # Take into account the extra overlap pixels we'll be adding
    max_cols = int(options['maxcols']) - int(options['overlap'])
    max_rows = int(options['maxrows']) - int(options['overlap'])

    if max_cols == 0:
        gcore.fatal(
            _("It is not possible to set 'maxcols=%s' and "
              "'overlap=%s'. Please set maxcols>overlap") %
            (options['maxcols'], options['overlap']))
    elif max_rows == 0:
        gcore.fatal(
            _("It is not possible to set 'maxrows=%s' and "
              "'overlap=%s'. Please set maxrows>overlap") %
            (options['maxrows'], options['overlap']))
    # destination projection
    if not options['destproj']:
        dest_proj = gcore.read_command('g.proj', quiet=True,
                                       flags='jf').rstrip('\n')
        if not dest_proj:
            gcore.fatal(_('g.proj failed'))
    else:
        dest_proj = options['destproj']
    gcore.debug("Getting destination projection -> '%s'" % dest_proj)

    # projection scale
    if not options['destscale']:
        ret = gcore.parse_command('g.proj', quiet=True, flags='j')
        if not ret:
            gcore.fatal(_('g.proj failed'))

        if '+to_meter' in ret:
            dest_scale = ret['+to_meter'].strip()
        else:
            gcore.warning(
                _("Scale (%s) not found, assuming '1'") % '+to_meter')
            dest_scale = '1'
    else:
        dest_scale = options['destscale']
    gcore.debug('Getting destination projection scale -> %s' % dest_scale)

    # set up the projections
    srs_source = {
        'proj': options['sourceproj'],
        'scale': float(options['sourcescale'])
    }
    srs_dest = {'proj': dest_proj, 'scale': float(dest_scale)}

    if options['region']:
        gcore.run_command('g.region', quiet=True, region=options['region'])
    dest_bbox = gcore.region()
    gcore.debug('Getting destination region')

    # output field separator
    fs = separator(options['separator'])

    # project the destination region into the source:
    gcore.verbose('Projecting destination region into source...')
    dest_bbox_points = bboxToPoints(dest_bbox)

    dest_bbox_source_points, errors_dest = projectPoints(dest_bbox_points,
                                                         source=srs_dest,
                                                         dest=srs_source)

    if len(dest_bbox_source_points) == 0:
        gcore.fatal(
            _("There are no tiles available. Probably the output "
              "projection system is not compatible with the "
              "projection of the current location"))

    source_bbox = pointsToBbox(dest_bbox_source_points)

    gcore.verbose('Projecting source bounding box into destination...')

    source_bbox_points = bboxToPoints(source_bbox)

    source_bbox_dest_points, errors_source = projectPoints(source_bbox_points,
                                                           source=srs_source,
                                                           dest=srs_dest)

    x_metric = 1 / dest_bbox['ewres']
    y_metric = 1 / dest_bbox['nsres']

    gcore.verbose('Computing length of sides of source bounding box...')

    source_bbox_dest_lengths = sideLengths(source_bbox_dest_points, x_metric,
                                           y_metric)

    # Find the skewness of the two directions; define it to be greater than
    # one in the direction (x or y) in which the world is least skewed
    # (i.e. north-south in lat/long).
    # Divide the world into strips. These strips are as big as possible,
    # constrained by max_
    # In the other direction do the same thing.
    # There's some recomputation of the size of the world that has to come in
    # here somewhere.

    # For now, however, we go ahead and request more data than is necessary.
    # For small regions far from the critical areas of projections this makes
    # very little difference in the amount of data retrieved.
    # We can make this more efficient for big regions or regions near critical
    # points later.

    bigger = []
    bigger.append(max(source_bbox_dest_lengths['x']))
    bigger.append(max(source_bbox_dest_lengths['y']))
    maxdim = (max_cols, max_rows)

    # Compute the number and size of tiles to use in each direction.
    # The tiles are fairly even-sized: they differ from each other in height
    # and width by at most one cell. To keep the numbers simple, this extra
    # cell is added to every tile (see the standalone arithmetic sketch after
    # this function).

    gcore.message(_('Computing tiling...'))
    tiles = [-1, -1]
    tile_base_size = [-1, -1]
    tiles_extra_1 = [-1, -1]
    tile_size = [-1, -1]
    tileset_size = [-1, -1]
    tile_size_overlap = [-1, -1]
    for i in range(len(bigger)):
        # make these into integers.
        # round up
        bigger[i] = int(bigger[i] + 1)
        tiles[i] = int((bigger[i] / maxdim[i]) + 1)
        tile_size[i] = tile_base_size[i] = int(bigger[i] / tiles[i])
        tiles_extra_1[i] = int(bigger[i] % tiles[i])
        # This is adding the extra pixel (remainder) to all of the tiles:
        if tiles_extra_1[i] > 0:
            tile_size[i] = tile_base_size[i] + 1
        tileset_size[i] = int(tile_size[i] * tiles[i])
        # Add overlap to the tiles (doesn't affect tileset_size)
        tile_size_overlap[i] = tile_size[i] + int(options['overlap'])

    gcore.verbose("There will be %d by %d tiles each %d by %d cells" %
                  (tiles[0], tiles[1], tile_size[0], tile_size[1]))

    ximax = tiles[0]
    yimax = tiles[1]

    min_x = source_bbox['w']
    min_y = source_bbox['s']
    max_x = source_bbox['e']
    max_y = source_bbox['n']
    span_x = (max_x - min_x)
    span_y = (max_y - min_y)

    xi = 0
    tile_bbox = {'w': -1, 's': -1, 'e': -1, 'n': -1}

    if errors_dest > 0:
        gcore.warning(
            _("During computation %i tiles could not be created") %
            errors_dest)

    while xi < ximax:
        tile_bbox['w'] = float(min_x) + (float(xi) * float(
            tile_size[0]) / float(tileset_size[0])) * float(span_x)
        tile_bbox['e'] = float(min_x) + (float(xi + 1) * float(
            tile_size_overlap[0]) / float(tileset_size[0])) * float(span_x)
        yi = 0
        while yi < yimax:
            tile_bbox['s'] = float(min_y) + (float(yi) * float(
                tile_size[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox['n'] = float(min_y) + (float(yi + 1) * float(
                tile_size_overlap[1]) / float(tileset_size[1])) * float(span_y)
            tile_bbox_points = bboxToPoints(tile_bbox)
            tile_dest_bbox_points, errors = projectPoints(tile_bbox_points,
                                                          source=srs_source,
                                                          dest=srs_dest)
            tile_dest_bbox = pointsToBbox(tile_dest_bbox_points)
            if bboxesIntersect(tile_dest_bbox, dest_bbox):
                if flags['w']:
                    print("bbox=%s,%s,%s,%s&width=%s&height=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                elif flags['g']:
                    print("w=%s;s=%s;e=%s;n=%s;cols=%s;rows=%s" %
                          (tile_bbox['w'], tile_bbox['s'], tile_bbox['e'],
                           tile_bbox['n'], tile_size_overlap[0],
                           tile_size_overlap[1]))
                else:
                    print("%s%s%s%s%s%s%s%s%s%s%s" %
                          (tile_bbox['w'], fs, tile_bbox['s'], fs,
                           tile_bbox['e'], fs, tile_bbox['n'], fs,
                           tile_size_overlap[0], fs, tile_size_overlap[1]))
            yi += 1
        xi += 1
Example #54
0
def main():
    global tmp

    fs = separator(options['separator'])
    threeD = flags['z']

    prog = 'v.in.lines'

    if threeD:
        do3D = 'z'
    else:
        do3D = ''

    tmp = grass.tempfile()

    # set up input file
    if options['input'] == '-':
        infile = None
        inf = sys.stdin
    else:
        infile = options['input']
        if not os.path.exists(infile):
            grass.fatal(_("Unable to read input file <%s>") % infile)
        grass.debug("input file=[%s]" % infile)

    if not infile:
        # read from stdin and write to tmpfile (v.in.mapgen wants a real file)
        outf = open(tmp, 'w')
        for line in inf:
            if len(line.lstrip()) == 0 or line[0] == '#':
                continue
            outf.write(line.replace(fs, ' '))

        outf.close()
        runfile = tmp
    else:
        # read from a real file
        if fs == ' ':
            runfile = infile
        else:
            inf = open(infile)
            outf = open(tmp, 'w')

            for line in inf:
                if len(line.lstrip()) == 0 or line[0] == '#':
                    continue
                outf.write(line.replace(fs, ' '))

            inf.close()
            outf.close()
            runfile = tmp

    # check that there are at least two columns (three if -z is given)
    inf = open(runfile)
    for line in inf:
        if len(line.lstrip()) == 0 or line[0] == '#':
            continue
        numcols = len(line.split())
        break
    inf.close()
    if (do3D and numcols < 3) or (not do3D and numcols < 2):
        grass.fatal(_("Not enough data columns. (incorrect fs setting?)"))

    grass.run_command('v.in.mapgen',
                      flags='f' + do3D,
                      input=runfile,
                      output=options['output'])
Example #55
0
def main():
    infile = options['input']
    compression_off = flags['c']
    mapset = None
    if '@' in infile:
        infile, mapset = infile.split('@')

    if options['output']:
        outfile_path, outfile_base = os.path.split(os.path.abspath(options['output']))
    else:
        outfile_path, outfile_base = os.path.split(os.path.abspath(infile + ".pack"))
    
    outfile = os.path.join(outfile_path, outfile_base)
    
    global tmp
    tmp = grass.tempdir()
    tmp_dir = os.path.join(tmp, infile)
    os.mkdir(tmp_dir)
    grass.debug('tmp_dir = %s' % tmp_dir)
    
    gfile = grass.find_file(name = infile, element = 'cell', mapset = mapset)
    if not gfile['name']:
        grass.fatal(_("Raster map <%s> not found") % infile)
    
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(_("Pack file <%s> already exists and will be overwritten") % outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <output>: <%s> exists.") % outfile)
    
    grass.message(_("Packing <%s> to <%s>...") % (gfile['fullname'], outfile))
    basedir = os.path.sep.join(os.path.normpath(gfile['file']).split(os.path.sep)[:-2])
    olddir  = os.getcwd()
    
    # copy elements
    for element in ['cats', 'cell', 'cellhd', 'colr', 'fcell', 'hist']:
        path = os.path.join(basedir, element, infile)
        if os.path.exists(path):
            grass.debug('copying %s' % path)
            shutil.copyfile(path,
                            os.path.join(tmp_dir, element))
            
    if os.path.exists(os.path.join(basedir, 'cell_misc', infile)):
        shutil.copytree(os.path.join(basedir, 'cell_misc', infile),
                        os.path.join(tmp_dir, 'cell_misc'))
        
    if not os.listdir(tmp_dir):
        grass.fatal(_("No raster map components found"))
                    
    # copy projection info
    # (would prefer to use g.proj*, but this way is 5.3 and 5.7 compat)
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS', 'EPSG']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            shutil.copyfile(path, os.path.join(tmp_dir, 'PROJ_' + support))
    
    # pack it all up
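    # The resulting archive holds a single top-level directory named after the
    # map, containing the copied elements (cats, cell, cellhd, colr, fcell,
    # hist, cell_misc) plus any PROJ_* files gathered above.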
    os.chdir(tmp)
    if compression_off:
        tar = tarfile.TarFile.open(name = outfile_base, mode = 'w:')
    else:
        tar = tarfile.TarFile.open(name = outfile_base, mode = 'w:gz')
    tar.add(infile, recursive = True)
    tar.close()
    try:
        shutil.move(outfile_base, outfile)
    except shutil.Error as e:
        grass.fatal(e)
        
    os.chdir(olddir)
    
    grass.verbose(_("Raster map saved to '%s'") % outfile)
Example #56
0
def main():
    G_gisinit(sys.argv[0])

    inmap = options["input"]
    outmap = options["output"]
    skip = int(options["skip"])
    go_vert = flags["v"]

    if go_vert:
        sys.exit("Vertical lines are yet to do.")

    ##### Query the region
    region = Cell_head()
    G_get_window(byref(region))

    #### Raster map setup
    # find raster map in search path
    mapset = None
    if "@" in inmap:
        inmap, mapset = inmap.split("@")

    gfile = grass.find_file(name=inmap, element="cell", mapset=mapset)
    if not gfile["name"]:
        grass.fatal(_("Raster map <%s> not found") % inmap)

    # determine the inputmap type (CELL/FCELL/DCELL)
    data_type = Rast_map_type(inmap, mapset)

    if data_type == CELL_TYPE:
        ptype = POINTER(c_int)
        type_name = "CELL"
    elif data_type == FCELL_TYPE:
        ptype = POINTER(c_float)
        type_name = "FCELL"
    elif data_type == DCELL_TYPE:
        ptype = POINTER(c_double)
        type_name = "DCELL"

    # print "Raster map <%s> contains data type %s." % (inmap, type_name)

    in_fd = Rast_open_old(inmap, mapset)
    in_rast = Rast_allocate_buf(data_type)
    in_rast = cast(c_void_p(in_rast), ptype)

    rows = Rast_window_rows()
    cols = Rast_window_cols()
    # print "Current region is %d rows x %d columns" % (rows, cols)

    #### Vector map setup
    # define map structure
    map_info = pointer(Map_info())

    # define open level (level 2: topology)
    Vect_set_open_level(2)

    # open new 3D vector map
    Vect_open_new(map_info, outmap, True)
    print("ddd")
    Vect_hist_command(map_info)

    # Create and initialize structs to store points/lines and category numbers
    Points = Vect_new_line_struct()
    Cats = Vect_new_cats_struct()
    fea_type = GV_LINE

    LineArrayType = c_double * cols
    xL = LineArrayType()
    yL = LineArrayType()
    zL = LineArrayType()

    #### iterate through map rows
    for row in range(rows):
        if row % skip != 0:
            continue

        # read a row of raster data into memory, then print it
        Rast_get_row(in_fd, in_rast, row, data_type)
        # print row, in_rast[0:cols]
        # print row, in_rast[0:5]

        # y-value
        coor_row_static = Rast_row_to_northing((row + 0.5), byref(region))
        # x-end nodes
        # coor_col_min = G_col_to_easting((0 + 0.5), byref(region))
        # coor_col_max = G_col_to_easting((cols - 0.5), byref(region))
        # print '  ',coor_row_static,coor_col_min,coor_col_max

        # reset
        n = 0
        for col in range(cols):
            xL[col] = yL[col] = zL[col] = 0

        # TODO check for NULL
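        # The threshold below is a heuristic stand-in for a proper NULL test:
        # CELL nulls appear to be stored as the smallest 32-bit integer
        # (about -2.1e9) and FCELL/DCELL nulls as NaN, for which the comparison
        # is False, so only values > -2e9 are treated as valid data.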
        for col in range(cols):
            #            if not G_is_null_value(byref(in_rast[col]), data_type):
            if in_rast[col] > -2e9:
                xL[n] = Rast_col_to_easting((col + 0.5), byref(region))
                yL[n] = coor_row_static
                zL[n] = in_rast[col]
                n = n + 1

        # print valid_cols,n
        Vect_cat_del(Cats, 1)
        # beware: for the first row this creates a category 0
        Vect_cat_set(Cats, 1, row)
        Vect_reset_line(Points)
        Vect_copy_xyz_to_pnts(Points, xL, yL, zL, n)
        Vect_write_line(map_info, fea_type, Points, Cats)

    # Build topology for vector map and close them all
    Vect_build(map_info)
    Vect_close(map_info)
    Rast_close(in_fd)
    G_done_msg("")
Example #57
0
    # FIXME: since module descriptions are used again, we now have the third
    # copy of the same string (one is in modules)
    elif menu == 'module_tree':
        from lmgr.menudata import LayerManagerModuleTree
        from core.globalvar import WXGUIDIR
        filename = os.path.join(WXGUIDIR, 'xml', 'module_tree_menudata.xml')
        menudata = LayerManagerModuleTree(filename)
    elif menu == 'modeler':
        from gmodeler.menudata import ModelerMenuData
        menudata = ModelerMenuData()
    elif menu == 'psmap':
        from psmap.menudata import PsMapMenuData
        menudata = PsMapMenuData()
    else:
        import grass.script.core as gscore
        gscore.fatal("Unknown value for parameter menu: " % menu)

    if action == 'strings':
        menudata.PrintStrings(sys.stdout)
    elif action == 'tree':
        menudata.PrintTree(sys.stdout)
    elif action == 'commands':
        menudata.PrintCommands(sys.stdout)
    elif action == 'dump':
        print(menudata.model)
    else:
        import grass.script.core as gscore
        gscore.fatal("Unknown value for parameter action: " % action)

    sys.exit(0)
Example #58
0
def main():
    developments = options['development'].split(',')
    observed_popul_file = options['observed_population']
    projected_popul_file = options['projected_population']
    sep = gutils.separator(options['separator'])
    subregions = options['subregions']
    methods = options['method'].split(',')
    plot = options['plot']
    simulation_times = [
        float(each) for each in options['simulation_times'].split(',')
    ]

    for each in methods:
        if each in ('exp_approach', 'logarithmic2'):
            try:
                from scipy.optimize import curve_fit
            except ImportError:
                gcore.fatal(
                    _("Importing scipy failed. Method '{m}' is not available").
                    format(m=each))

    # exp_approach and logarithmic2 need at least 3 data points
    if len(developments) <= 2 and ('exp_approach' in methods
                                   or 'logarithmic2' in methods):
        gcore.fatal(_("Not enough data points for methods 'exp_approach'/'logarithmic2'"))
    if len(developments) == 3 and ('exp_approach' in methods
                                   and 'logarithmic2' in methods):
        gcore.warning(
            _("Can't decide between 'exp_approach' and 'logarithmic2' methods"
              " because both methods can have exact solutions for 3 data points resulting in RMSE = 0"
              ))
    observed_popul = np.genfromtxt(observed_popul_file,
                                   dtype=float,
                                   delimiter=sep,
                                   names=True)
    projected_popul = np.genfromtxt(projected_popul_file,
                                    dtype=float,
                                    delimiter=sep,
                                    names=True)
    year_col = observed_popul.dtype.names[0]
    observed_times = observed_popul[year_col]
    year_col = projected_popul.dtype.names[0]
    projected_times = projected_popul[year_col]

    if len(developments) != len(observed_times):
        gcore.fatal(
            _("Number of development raster maps doesn't not correspond to the number of observed times"
              ))

    # gather developed cells in subregions
    gcore.info(_("Computing number of developed cells..."))
    table_developed = {}
    subregionIds = set()
    for i in range(len(observed_times)):
        gcore.percent(i, len(observed_times), 1)
        data = gcore.read_command('r.univar',
                                  flags='gt',
                                  zones=subregions,
                                  map=developments[i])
        for line in data.splitlines():
            stats = line.split('|')
            if stats[0] == 'zone':
                continue
            subregionId, developed_cells = stats[0], int(stats[12])
            subregionIds.add(subregionId)
            if i == 0:
                table_developed[subregionId] = []
            table_developed[subregionId].append(developed_cells)
        gcore.percent(1, 1, 1)
    subregionIds = sorted(list(subregionIds))
    # linear interpolation between population points
    population_for_simulated_times = {}
    for subregionId in table_developed.keys():
        population_for_simulated_times[subregionId] = np.interp(
            x=simulation_times,
            xp=np.append(observed_times, projected_times),
            fp=np.append(observed_popul[subregionId],
                         projected_popul[subregionId]))
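    # Illustrative example (made-up numbers): np.interp(x=[2012],
    # xp=[2010, 2020], fp=[100.0, 200.0]) returns [120.0], i.e. population is
    # interpolated linearly between observed and projected time points.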
    # regression
    demand = {}
    i = 0
    if plot:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        n_plots = int(np.ceil(np.sqrt(len(subregionIds))))
        fig = plt.figure(figsize=(5 * n_plots, 5 * n_plots))

    for subregionId in subregionIds:
        i += 1
        rmse = dict()
        predicted = dict()
        simulated = dict()
        coeff = dict()
        for method in methods:
            # observed population points for subregion
            reg_pop = observed_popul[subregionId]
            simulated[method] = np.array(
                population_for_simulated_times[subregionId])

            if method in ('exp_approach', 'logarithmic2'):
                # we have to scale it first
                y = np.array(table_developed[subregionId])
                magn = float(
                    np.power(
                        10,
                        max(magnitude(np.max(reg_pop)), magnitude(np.max(y)))))
                x = reg_pop / magn
                y = y / magn
                if method == 'exp_approach':
                    initial = (
                        0.5, np.mean(x), np.mean(y)
                    )  # this seems to work best for our data for exp_approach
                elif method == 'logarithmic2':
                    popt, pcov = curve_fit(logarithmic, x, y)
                    initial = (popt[0], popt[1], 0)
                # with 'raise' it would stop on every FloatingPointError
                with np.errstate(invalid='warn'):
                    try:
                        popt, pcov = curve_fit(globals()[method],
                                               x,
                                               y,
                                               p0=initial)
                        if np.isnan(popt).any():
                            raise RuntimeError
                        # would result in nans in predicted
                        if method == 'logarithmic2' and np.any(
                                simulated[method] / magn <= popt[-1]):
                            raise RuntimeError
                    except (FloatingPointError, RuntimeError):
                        # so that another method is selected
                        rmse[method] = sys.maxsize
                        gcore.warning(
                            _("Method '{m}' cannot converge for subregion {reg}"
                              .format(m=method, reg=subregionId)))
                        if len(methods) == 1:
                            gcore.fatal(
                                _("Method '{m}' failed for subregion {reg},"
                                  " please select at least one other method").
                                format(m=method, reg=subregionId))
                    else:
                        predicted[method] = globals()[method](
                            simulated[method] / magn, *popt) * magn
                        r = globals()[method](
                            x, *popt) * magn - table_developed[subregionId]
                        coeff[method] = popt
                        if len(reg_pop) > 3:
                            rmse[method] = np.sqrt(
                                (np.sum(r * r) / (len(reg_pop) - 3)))
                        else:
                            rmse[method] = 0
            else:
                if method == 'logarithmic':
                    reg_pop = np.log(reg_pop)
                if method == 'exponential':
                    y = np.log(table_developed[subregionId])
                else:
                    y = table_developed[subregionId]
                A = np.vstack((reg_pop, np.ones(len(reg_pop)))).T
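                # NumPy 1.14 changed the default rcond of np.linalg.lstsq and
                # warns unless it is passed explicitly: rcond=None selects the
                # new machine-precision-based default, while rcond=-1 keeps the
                # old behaviour on older NumPy versions.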
                npversion = [int(x) for x in np.__version__.split('.')]
                if npversion >= [1, 14, 0]:
                    rcond = None
                else:
                    rcond = -1
                m, c = np.linalg.lstsq(A, y, rcond=rcond)[0]  # y = mx + c
                coeff[method] = m, c

                if method == 'logarithmic':
                    with np.errstate(invalid='ignore', divide='ignore'):
                        predicted[method] = np.where(
                            simulated[method] > 1,
                            np.log(simulated[method]) * m + c, 0)
                    predicted[method] = np.where(predicted[method] > 0,
                                                 predicted[method], 0)
                    r = (reg_pop * m + c) - table_developed[subregionId]
                elif method == 'exponential':
                    predicted[method] = np.exp(m * simulated[method] + c)
                    r = np.exp(m * reg_pop + c) - table_developed[subregionId]
                else:  # linear
                    predicted[method] = simulated[method] * m + c
                    r = (reg_pop * m + c) - table_developed[subregionId]
                # RMSE
                if len(reg_pop) > 2:
                    rmse[method] = np.sqrt(
                        (np.sum(r * r) / (len(reg_pop) - 2)))
                else:
                    rmse[method] = 0
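        # Note: RMSE above is sqrt(sum(r_i**2) / (n - k)), where r_i are the
        # residuals, n the number of observed points and k the number of
        # fitted parameters (3 for the scipy-fitted methods, 2 otherwise);
        # the method with the lowest RMSE is selected below.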

        method = min(rmse, key=rmse.get)
        gcore.verbose(
            _("Method '{meth}' was selected for subregion {reg}").format(
                meth=method, reg=subregionId))
        # write demand
        demand[subregionId] = predicted[method]
        demand[subregionId] = np.diff(demand[subregionId])
        if np.any(demand[subregionId] < 0):
            gcore.warning(
                _("Subregion {sub} has negative numbers"
                  " of newly developed cells, changing to zero".format(
                      sub=subregionId)))
            demand[subregionId][demand[subregionId] < 0] = 0
        if coeff[method][0] < 0:
            # couldn't establish reliable population-area
            # project by number of developed pixels in analyzed period
            range_developed = (table_developed[subregionId][-1] -
                               table_developed[subregionId][0])
            range_times = observed_times[-1] - observed_times[0]
            dev_per_step = math.ceil(range_developed / float(range_times))
            # this assumes demand is projected yearly
            demand[subregionId].fill(dev_per_step if dev_per_step > 0 else 0)
            gcore.warning(
                _("For subregion {sub} population and development are inversely proportional,"
                  " demand will be interpolated based on prior change in development only."
                  .format(sub=subregionId)))

        # draw
        if plot:
            ax = fig.add_subplot(n_plots, n_plots, i)
            ax.set_title("{sid}, RMSE: {rmse:.3f}".format(sid=subregionId,
                                                          rmse=rmse[method]))
            ax.set_xlabel('population')
            ax.set_ylabel('developed cells')
            # plot known points
            x = np.array(observed_popul[subregionId])
            y = np.array(table_developed[subregionId])
            ax.plot(x, y, marker='o', linestyle='', markersize=8)
            # plot predicted curve
            x_pred = np.linspace(
                np.min(x),
                np.max(np.array(population_for_simulated_times[subregionId])),
                30)
            cf = coeff[method]
            if method == 'linear':
                line = x_pred * cf[0] + cf[1]
                label = "$y = {c:.3f} + {m:.3f} x$".format(m=cf[0], c=cf[1])
            elif method == 'logarithmic':
                line = np.log(x_pred) * cf[0] + cf[1]
                label = "$y = {c:.3f} + {m:.3f} \ln(x)$".format(m=cf[0],
                                                                c=cf[1])
            elif method == 'exponential':
                line = np.exp(x_pred * cf[0] + cf[1])
                label = "$y = {c:.3f} e^{{{m:.3f}x}}$".format(m=cf[0],
                                                              c=np.exp(cf[1]))
            elif method == 'exp_approach':
                line = exp_approach(x_pred / magn, *cf) * magn
                label = "$y = (1 -  e^{{-{A:.3f}(x-{B:.3f})}}) + {C:.3f}$".format(
                    A=cf[0], B=cf[1], C=cf[2])
            elif method == 'logarithmic2':
                line = logarithmic2(x_pred / magn, *cf) * magn
                label = "$y = {A:.3f} + {B:.3f} \ln(x-{C:.3f})$".format(
                    A=cf[0], B=cf[1], C=cf[2])

            ax.plot(x_pred, line, label=label)
            ax.plot(simulated[method],
                    predicted[method],
                    linestyle='',
                    marker='o',
                    markerfacecolor='None')
            plt.legend(loc=0)
            labels = ax.get_xticklabels()
            plt.setp(labels, rotation=30)
    if plot:
        plt.tight_layout()
        fig.savefig(plot)

    # write demand
    with open(options['demand'], 'w') as f:
        header = observed_popul.dtype.names  # the order is kept here
        header = [header[0]] + [sub for sub in header[1:] if sub in subregionIds]
        f.write(sep.join(header))
        f.write('\n')
        i = 0
        for time in simulation_times[1:]:
            f.write(str(int(time)))
            f.write(sep)
            # put 0 where there are more counties but are not in region
            for sub in header[1:]:  # to keep order of subregions
                f.write(str(int(demand[sub][i])))
                if sub != header[-1]:
                    f.write(sep)
            f.write('\n')
            i += 1
Example #59
0
def main():
    global tmp

    infile = options["input"]
    output = options["output"]
    matlab = flags["f"]
    threeD = flags["z"]

    prog = "v.in.mapgen"

    opts = ""

    if not os.path.isfile(infile):
        grass.fatal(_("Input file <%s> not found") % infile)

    if output:
        name = output
    else:
        name = ""

    if threeD:
        matlab = True

    if threeD:
        do3D = "z"
    else:
        do3D = ""

    tmp = grass.tempfile()

    # create ascii vector file
    inf = open(infile)
    outf = open(tmp, "w")

    grass.message(_("Importing data..."))
    cat = 1
    if matlab:
        # HB:  OLD v.in.mapgen.sh Matlab import command follows.
        # I have no idea what it's all about, so "new" matlab format will be
        # a series of x y with "nan nan" breaking lines. (as NOAA provides)
        # Old command:
        #  tac $infile | $AWK 'BEGIN { FS="," ; R=0 }
        #    $1~/\d*/   { printf("L %d\n", R) }
        #    $1~/   .*/ { printf(" %lf %lf\n", $2, $1) ; ++R }
        #    $1~/END/   { }' | tac > "$TMP"

        # matlab format.
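        # A hypothetical example of this input: one "x y [z]" set per line,
        # whitespace separated, with a "nan nan" line ending each polyline:
        #   10.1 20.2
        #   10.3 20.4
        #   nan nan
        #   11.0 21.0
        #   11.2 21.3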
        points = []

        for line in inf:
            f = line.split()
            if f[0].lower() == "nan":
                if points != []:
                    outf.write("L %d 1\n" % len(points))
                    for point in points:
                        outf.write(" %.15g %.15g %.15g\n" %
                                   tuple(map(float, point)))
                    outf.write(" 1 %d\n" % cat)
                    cat += 1
                points = []
            else:
                if len(f) == 2:
                    f.append("0")
                points.append(f)

        if points != []:
            outf.write("L %d 1\n" % len(points))
            for point in points:
                try:
                    outf.write(" %.15g %.15g %.15g\n" %
                               tuple(map(float, point)))
                except ValueError:
                    grass.fatal(
                        _("An error occurred on line '%s', exiting.") %
                        line.strip())
            outf.write(" 1 %d\n" % cat)
            cat += 1
    else:
        # mapgen format.
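        # A hypothetical example of this input: tab-separated "x<TAB>y" pairs,
        # with any line starting with "#" ending the current polyline:
        #   # break
        #   10.1    20.2
        #   10.3    20.4
        #   # break
        #   11.0    21.0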
        points = []
        for line in inf:
            if line[0] == "#":
                if points != []:
                    outf.write("L %d 1\n" % len(points))
                    for point in points:
                        outf.write(" %.15g %.15g\n" % tuple(map(float, point)))
                    outf.write(" 1 %d\n" % cat)
                    cat += 1
                points = []
            else:
                points.append(line.rstrip("\r\n").split("\t"))

        if points != []:
            outf.write("L %d 1\n" % len(points))
            for point in points:
                outf.write(" %.15g %.15g\n" % tuple(map(float, point)))
            outf.write(" 1 %d\n" % cat)
            cat += 1
    outf.close()
    inf.close()

    # create digit header
    digfile = tmp + ".dig"
    outf = open(digfile, "w")
    t = string.Template("""ORGANIZATION: GRASSroots organization
DIGIT DATE:   $date
DIGIT NAME:   $user@$host
MAP NAME:     $name
MAP DATE:     $year
MAP SCALE:    1
OTHER INFO:   Imported with $prog
ZONE:         0
MAP THRESH:   0
VERTI:
""")
    date = time.strftime("%m/%d/%y")
    year = time.strftime("%Y")
    user = os.getenv("USERNAME") or os.getenv("LOGNAME")
    host = os.getenv("COMPUTERNAME") or os.uname()[1]

    s = t.substitute(prog=prog,
                     name=name,
                     date=date,
                     year=year,
                     user=user,
                     host=host)
    outf.write(s)

    # process points list to ascii vector file (merge in vertices)
    inf = open(tmp)
    shutil.copyfileobj(inf, outf)
    inf.close()

    outf.close()

    if not name:
        # if no name for vector file given, cat to stdout
        inf = open(digfile)
        shutil.copyfileobj(inf, sys.stdout)
        inf.close()
    else:
        # import to binary vector file
        grass.message(_("Importing with v.in.ascii..."))
        try:
            grass.run_command("v.in.ascii",
                              flags=do3D,
                              input=digfile,
                              output=name,
                              format="standard")
        except CalledModuleError:
            grass.fatal(
                _('An error occurred on creating "%s", please check') % name)
Example #60
0
def main():
    layers = options["map"].split(",")

    if len(layers) < 2:
        gcore.error(_("At least 2 maps are required"))

    tmpfile = gcore.tempfile()

    for map in layers:
        if not gcore.find_file(map, element="cell")["file"]:
            gcore.fatal(_("Raster map <%s> not found") % map)

    try:
        gcore.write_command(
            "d.text", color="black", size=4, line=1, stdin="CORRELATION"
        )
    except CalledModuleError:
        return 1

    os.environ["GRASS_RENDER_FILE_READ"] = "TRUE"

    colors = "red black blue green gray violet".split()
    line = 2
    iloop = 0
    jloop = 0
    for iloop, i in enumerate(layers):
        for jloop, j in enumerate(layers):
            if i != j and iloop <= jloop:
                color = colors[0]
                colors = colors[1:]
                colors.append(color)
                gcore.write_command(
                    "d.text", color=color, size=4, line=line, stdin="%s %s" % (i, j)
                )
                line += 1

                ofile = open(tmpfile, "w")
                gcore.run_command("r.stats", flags="cnA", input=(i, j), stdout=ofile)
                ofile.close()

                ifile = open(tmpfile, "r")
                first = True
                # use a separate name so the d.text line counter isn't clobbered
                for rec in ifile:
                    f = rec.rstrip("\r\n").split(" ")
                    x = float(f[0])
                    y = float(f[1])
                    if first:
                        minx = maxx = x
                        miny = maxy = y
                        first = False
                    if minx > x:
                        minx = x
                    if maxx < x:
                        maxx = x
                    if miny > y:
                        miny = y
                    if maxy < y:
                        maxy = y
                ifile.close()

                kx = 100.0 / (maxx - minx + 1)
                ky = 100.0 / (maxy - miny + 1)
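                # kx/ky above map the value ranges of the two rasters onto the
                # d.graph frame, which d.graph addresses in percent (0-100) of
                # the display by default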

                p = gcore.feed_command("d.graph", color=color)
                ofile = p.stdin

                ifile = open(tmpfile, "r")
                for rec in ifile:
                    f = rec.rstrip("\r\n").split(" ")
                    x = float(f[0])
                    y = float(f[1])
                    ofile.write(
                        b"icon + 0.1 %f %f\n"
                        % ((x - minx + 1) * kx, (y - miny + 1) * ky)
                    )
                ifile.close()

                ofile.close()
                p.wait()

    try_remove(tmpfile)

    return 0