def parseModules():
    """Parse modules' interface"""
    # modules whose interface must not be parsed
    skip = ["g.mapsets_picker.py", "v.type_wrapper.py", "g.parser", "vcolors"]
    parsed = dict()
    total = len(grassCmd)
    for idx, module in enumerate(grassCmd, start=1):
        # progress report every 10th module
        if idx % 10 == 0:
            grass.info("* %d/%d" % (idx, total))
        if module in skip:
            continue
        try:
            interface = gtask.parse_interface(module)
        except Exception as e:
            grass.error(module + ": " + str(e))
            continue
        parsed[interface.name] = {
            "label": interface.label,
            "desc": interface.description,
            "keywords": interface.keywords,
        }
    return parsed
def main():
    """Convert between EPSG codes and WKT/proj4 projection descriptions.

    Dispatches on the module options: an EPSG code, a WKT file, or (by
    default) the current GRASS location definition.
    """
    try:
        from osgeo import osr  # noqa: F401 -- availability check for the helpers
    except ImportError:
        grass.fatal(
            _(
                "Unable to load GDAL Python bindings (requires package "
                "'python-gdal' being installed)"
            ),
        )
    epsg = options["epsg"]
    pathwkt = options["wkt"]
    if epsg and pathwkt:
        grass.error("Only one type of conversions can be processed concurrently")

    if epsg:
        epsg2standards(epsg)
    elif pathwkt:
        try:
            # context manager guarantees the file handle is closed
            with open(pathwkt, "r") as io:
                wkt = io.read().rstrip()
            wkt2standards(wkt)
        except IOError as e:
            # FIX: message previously printed e.errno where the file name
            # belongs; report the actual path instead
            grass.error("Unable to open file <%s>: %s" % (pathwkt, e.strerror))
    else:
        grassEpsg()
def parseModules():
    """Parse modules' interface"""
    result = {}
    # modules that must not be parsed
    blacklist = ('g.mapsets_picker.py', 'v.type_wrapper.py', 'g.parser', 'vcolors')
    total = len(grassCmd)
    for num, module in enumerate(grassCmd, 1):
        # report progress every tenth module
        if num % 10 == 0:
            grass.info('* %d/%d' % (num, total))
        if module in blacklist:
            continue
        try:
            interface = gtask.parse_interface(module)
        except Exception as e:
            grass.error(module + ': ' + str(e))
            continue
        result[interface.name] = {
            'label': interface.label,
            'desc': interface.description,
            'keywords': interface.keywords,
        }
    return result
def main():
    """Serve the pycsw WSGI application on the configured port."""
    try:
        global server
        from pycsw import server
    except ModuleNotFoundError as e:
        missing = e.msg
        grass.fatal(
            globalvar.MODULE_NOT_FOUND.format(
                lib=missing.split("'")[-2], url=globalvar.MODULE_URL
            )
        )

    port = int(options["port"])
    # make the application package importable: two directory levels
    # above the configuration path
    config_dir = os.path.dirname(options["path"])
    sys.path.append(os.path.dirname(config_dir))

    from wsgiref.simple_server import make_server

    try:
        httpd = make_server("", port, application)
        grass.message("Serving on port %d..." % port)
    except Exception as e:
        grass.error(str(e))
        sys.stdout.flush()
        sys.exit()
    httpd.serve_forever()
    sys.stdout.flush()
def writeEPSGtoPEMANENT(epsg):
    """Write the EPSG code into PERMANENT/PROJ_EPSG of the current location.

    An existing file is only overwritten when GRASS_OVERWRITE is set.
    """
    env = grass.gisenv()
    path = os.path.join(env["GISDBASE"], env["LOCATION_NAME"],
                        "PERMANENT", "PROJ_EPSG")
    # NOTE(review): any non-empty GRASS_OVERWRITE value (even "0") enables
    # overwriting here -- confirm that matches the intended semantics
    if os.path.isfile(path) and not os.getenv("GRASS_OVERWRITE", False):
        grass.message("EPSG file already exist <%s>" % path)
        return
    try:
        # FIX: deduplicated the two identical write branches; the context
        # manager also closes the handle if write() raises
        with open(path, "w") as io:
            io.write("epsg: %s" % epsg)
        grass.message("EPSG code have been written to <%s>" % path)
    except IOError as e:
        grass.error("I/O error({0}): {1}".format(e.errno, e.strerror))
def grassEpsg():
    """Print the EPSG code of the current GRASS location.

    Falls back to converting the location's WKT definition when the
    ``g.proj -p`` output contains no EPSG entry.
    """
    proj = Module('g.proj', flags='p', quiet=True, stdout_=PIPE)
    proj = proj.outputs.stdout
    lines = proj.splitlines()
    for e, line in enumerate(lines):
        if 'EPSG' in line:
            # the code value is on the line following the 'EPSG' label
            epsg = lines[e + 1].split(':')[1].replace(' ', '')
            print('epsg=%s' % epsg)
            if flags['s']:
                if isPermanent():
                    writeEPSGtoPEMANENT(epsg)
                else:
                    grass.warning("Unable to access PERMANENT mapset")
            return
    # no EPSG entry found -- derive standards from the WKT definition
    try:
        proj = Module('g.proj', flags='wf', quiet=True, stdout_=PIPE)
        proj = proj.outputs.stdout
        wkt2standards(proj)
    except Exception:
        # FIX: narrowed from a bare "except:" which also swallowed
        # SystemExit/KeyboardInterrupt
        grass.error('WKT input error')
def main():
    """Rebuild topology for every vector map in the current mapset."""
    mapset = grass.gisenv()['MAPSET']
    vectors = grass.list_grouped('vect')[mapset]
    num_vectors = len(vectors)
    # suppress per-map output unless verbosity is raised
    quiet = grass.verbosity() < 2
    ret = 0
    for i, vect in enumerate(vectors, start=1):
        map = "%s@%s" % (vect, mapset)
        grass.message(
            _("%s\nBuilding topology for vector map <%s> (%d of %d)...\n%s")
            % ('-' * 80, map, i, num_vectors, '-' * 80))
        grass.verbose(_("v.build map=%s") % map)
        try:
            grass.run_command("v.build", map=map, quiet=quiet)
        except CalledModuleError:
            grass.error(
                _("Building topology for vector map <%s> failed") % map)
            ret = 1
    return ret
def main():
    """Rebuild topology of all vector maps in the current mapset."""
    mapset = grass.gisenv()['MAPSET']
    vectors = grass.list_grouped('vect')[mapset]
    num_vectors = len(vectors)
    # be quiet per map unless the user asked for verbosity
    quiet = grass.verbosity() < 2
    ret = 0
    for i, vect in enumerate(vectors, 1):
        map = "%s@%s" % (vect, mapset)
        grass.message(
            _("%s\nBuilding topology for vector map <%s> (%d of %d)...\n%s")
            % ('-' * 80, map, i, num_vectors, '-' * 80))
        grass.verbose(_("v.build map=%s") % map)
        # non-zero exit status means the build failed
        if grass.run_command("v.build", map=map, quiet=quiet) != 0:
            grass.error(_("Building topology for vector map <%s> failed") % map)
            ret = 1
    return ret
def _render(self, cmd, env):
    """Run one rendering command; return 0 on success, 1 on failure."""
    name = cmd[0]
    kwargs = cmd[1]
    # TODO: use errors=status when working
    try:
        grass.run_command(name, env=env, **kwargs)
    except CalledModuleError as e:
        grass.error(e)
        return 1
    return 0
def run_one_combination(comb_count, comb_all, repeat, seed, development_start,
                        compactness_mean, compactness_range, discount_factor,
                        patches_file, fut_options, threshold,
                        hist_bins_area_orig, hist_range_area_orig,
                        hist_bins_compactness_orig, hist_range_compactness_orig,
                        cell_size, histogram_area_orig,
                        histogram_compactness_orig, tmp_name, queue):
    """Evaluate one calibration parameter combination of the FUTURES model.

    Repeats the simulation ``repeat`` times with per-attempt seeds, compares
    the simulated patch-area and compactness histograms with the observed
    ones, and puts the averaged distances on ``queue`` (``None`` on failure).
    """
    # temporary maps created by this process; removed by cleanup()
    TMP_PROCESS = []
    # unique name, must be sql compliant
    suffix = (str(discount_factor) + str(compactness_mean) + str(compactness_range)).replace('.', '')
    simulation_dev_end = tmp_name + 'simulation_dev_end_' + suffix
    simulation_dev_diff = tmp_name + 'simulation_dev_diff' + suffix
    tmp_clump = tmp_name + 'tmp_clump' + suffix
    TMP_PROCESS.append(simulation_dev_diff)
    TMP_PROCESS.append(simulation_dev_end)
    TMP_PROCESS.append(tmp_clump)
    sum_dist_area = 0
    sum_dist_compactness = 0
    # offset seed
    seed *= 10000
    for i in range(repeat):
        f_seed = seed + i
        gcore.message(_("Running calibration combination {comb_count}/{comb_all}"
                        " of simulation attempt {i}/{repeat} with random seed {s}...".format(comb_count=comb_count,
                                                                                             comb_all=comb_all,
                                                                                             i=i + 1, repeat=repeat,
                                                                                             s=f_seed)))
        try:
            run_simulation(development_start=development_start,
                           development_end=simulation_dev_end,
                           compactness_mean=compactness_mean,
                           compactness_range=compactness_range,
                           discount_factor=discount_factor,
                           patches_file=patches_file, seed=f_seed,
                           fut_options=fut_options)
        except CalledModuleError as e:
            # a failed simulation aborts the whole combination; signal with None
            queue.put(None)
            cleanup(tmp=TMP_PROCESS)
            gcore.error(_("Running r.futures.pga failed. "
                          "Details: {e}").format(e=e))
            return
        # derive newly developed cells and analyse their patches
        new_development(simulation_dev_end, simulation_dev_diff)
        data = patch_analysis(simulation_dev_diff, threshold, tmp_clump)
        sim_hist_area, sim_hist_compactness = create_histograms(data, hist_bins_area_orig, hist_range_area_orig,
                                                                hist_bins_compactness_orig, hist_range_compactness_orig,
                                                                cell_size)
        # accumulate histogram distances over all repeats
        sum_dist_area += compare_histograms(histogram_area_orig, sim_hist_area)
        sum_dist_compactness += compare_histograms(histogram_compactness_orig, sim_hist_compactness)
    mean_dist_area = sum_dist_area / repeat
    mean_dist_compactness = sum_dist_compactness / repeat
    data = {}
    data['input_discount_factor'] = discount_factor
    data['input_compactness_mean'] = compactness_mean
    data['input_compactness_range'] = compactness_range
    data['area_distance'] = mean_dist_area
    data['compactness_distance'] = mean_dist_compactness
    queue.put(data)
    cleanup(tmp=TMP_PROCESS)
def main():
    """Run SQL statements from a db.test file and compare the results.

    Record types in the test file: 'X' executes without output, 'S'
    selects and diffs the output against the expected block that follows.
    """
    test_file = options['test']
    expected = grass.tempfile()
    result = grass.tempfile()

    dbconn = grassdb.db_connection()
    grass.message(_("Using DB driver: %s") % dbconn['driver'])

    infile = os.path.join(os.environ['GISBASE'], 'etc', 'db.test', test_file)
    # FIX: open() instead of the Python-2-only file() builtin
    inf = open(infile)

    while True:
        type = inf.readline()
        if not type:
            break
        type = type.rstrip('\r\n')

        sql = inf.readline().rstrip('\r\n')
        sys.stdout.write(sql + '\n')

        # Copy expected result to temp file
        try:
            if type == 'X':
                grass.write_command('db.execute', input='-', stdin=sql + '\n')
            else:
                resf = open(result, 'w')
                grass.write_command('db.select', input='-', flags='c',
                                    stdin=sql + '\n', stdout=resf)
                resf.close()
        except CalledModuleError:
            grass.error("EXECUTE: ******** ERROR ********")
        else:
            grass.message(_("EXECUTE: OK"))

        # copy the expected result block (terminated by an empty line)
        expf = open(expected, 'w')
        while True:
            res = inf.readline().rstrip('\r\n')
            if not res:
                break
            expf.write(res + '\n')
        expf.close()

        if type == 'S':
            if grass.call(['diff', result, expected]) != 0:
                grass.error("RESULT: ******** ERROR ********")
            else:
                grass.message(_("RESULT: OK"))
def remove_extension(force = False):
    """Uninstall the extension named in options['extension'].

    With force=True the registered files are deleted and the metadata file
    updated; otherwise the files are only listed for the user.
    """
    # try to read XML metadata file first
    fXML = os.path.join(options['prefix'], 'modules.xml')
    name = options['extension']

    if name not in get_installed_extensions():
        grass.warning(_("Extension <%s> not found") % name)

    if force:
        grass.verbose(_("List of removed files:"))
    else:
        grass.info(_("Files to be removed (use flag 'f' to force removal):"))

    if os.path.exists(fXML):
        f = open(fXML, 'r')
        tree = etree.fromstring(f.read())
        # collect the files registered for this extension
        flist = []
        for task in tree.findall('task'):
            if name == task.get('name', default = '') and \
                    task.find('binary') is not None:
                for f in task.find('binary').findall('file'):
                    flist.append(f.text)

        if flist:
            removed = False
            err = list()
            for fpath in flist:
                try:
                    if force:
                        grass.verbose(fpath)
                        os.remove(fpath)
                        removed = True
                    else:
                        # dry run: only show what would be removed
                        print fpath
                except OSError:
                    err.append((_("Unable to remove file '%s'") % fpath))
            # nothing was actually removed -> treat as missing extension
            if force and not removed:
                grass.fatal(_("Extension <%s> not found") % options['extension'])

            if err:
                for e in err:
                    grass.error(e)
        else:
            # no file list in metadata -> fall back to standard removal
            remove_extension_std(force)
    else:
        remove_extension_std(force)

    if force:
        grass.message(_("Updating metadata file..."))
        remove_extension_xml()
        grass.message(_("Extension <%s> successfully uninstalled.") % options['extension'])
    else:
        grass.warning(_("Extension <%s> not removed.\n"
                        "Re-run '%s' with 'f' flag to force removal") % (options['extension'], 'g.extension'))
def main():
    """Execute SQL statements from a db.test file and verify the results."""
    test_file = options["test"]
    expected = gcore.tempfile()
    result = gcore.tempfile()

    dbconn = grassdb.db_connection()
    gcore.message(_("Using DB driver: %s") % dbconn["driver"])

    infile = os.path.join(os.environ["GISBASE"], "etc", "db.test", test_file)
    inf = open(infile)

    while True:
        header = inf.readline()
        if not header:
            break
        rec_type = header.rstrip("\r\n")

        sql = inf.readline().rstrip("\r\n")
        sys.stdout.write(sql + "\n")

        # execute the statement; 'X' records produce no selectable output
        try:
            if rec_type == "X":
                gcore.write_command("db.execute", input="-", stdin=sql + "\n")
            else:
                with open(result, "w") as resf:
                    gcore.write_command("db.select", input="-", flags="c",
                                        stdin=sql + "\n", stdout=resf)
        except CalledModuleError:
            gcore.error("EXECUTE: ******** ERROR ********")
        else:
            gcore.message(_("EXECUTE: OK"))

        # copy the expected result block (terminated by an empty line)
        with open(expected, "w") as expf:
            while True:
                res = inf.readline().rstrip("\r\n")
                if not res:
                    break
                expf.write(res + "\n")

        # 'S' records compare actual against expected output
        if rec_type == "S":
            if gcore.call(["diff", result, expected]) != 0:
                gcore.error("RESULT: ******** ERROR ********")
            else:
                gcore.message(_("RESULT: OK"))
def remove_modules(mlist, force = False):
    """Remove the given list of extension modules.

    With force=True the registered files are deleted; otherwise they are
    only listed for the user.
    """
    # try to read XML metadata file first
    fXML = os.path.join(options['prefix'], 'modules.xml')
    installed = get_installed_modules()

    if os.path.exists(fXML):
        f = open(fXML, 'r')
        tree = etree.fromstring(f.read())
        f.close()
    else:
        tree = None

    for name in mlist:
        if name not in installed:
            # try even if module does not seem to be available,
            # as the user may be trying to get rid of left over cruft
            grass.warning(_("Extension <%s> not found") % name)

        if tree is not None:
            # collect the files registered for this module
            flist = []
            for task in tree.findall('task'):
                if name == task.get('name') and \
                        task.find('binary') is not None:
                    for f in task.find('binary').findall('file'):
                        flist.append(f.text)
                    break

            if flist:
                removed = False
                err = list()
                for fpath in flist:
                    try:
                        if force:
                            grass.verbose(fpath)
                            removed = True
                            os.remove(fpath)
                        else:
                            # dry run: only show what would be removed
                            print fpath
                    except OSError:
                        err.append((_("Unable to remove file '%s'") % fpath))
                # nothing was actually removed -> treat as missing extension
                if force and not removed:
                    grass.fatal(_("Extension <%s> not found") % name)

                if err:
                    for e in err:
                        grass.error(e)
            else:
                # no file list in metadata -> fall back to standard removal
                remove_extension_std(name, force)
        else:
            remove_extension_std(name, force)
def main():
    """Run the SQL statements of a db.test file and check their results."""
    test_file = options['test']
    expected = grass.tempfile()
    result = grass.tempfile()

    dbconn = grassdb.db_connection()
    grass.message(_("Using DB driver: %s") % dbconn['driver'])

    infile = os.path.join(os.environ['GISBASE'], 'etc', 'db.test', test_file)
    inf = file(infile)

    while True:
        header = inf.readline()
        if not header:
            break
        record = header.rstrip('\r\n')

        sql = inf.readline().rstrip('\r\n')
        sys.stdout.write(sql + '\n')

        # Copy expected result to temp file
        try:
            if record == 'X':
                grass.write_command('db.execute', input = '-',
                                    stdin = sql + '\n')
            else:
                resf = file(result, 'w')
                grass.write_command('db.select', input = '-', flags = 'c',
                                    stdin = sql + '\n', stdout = resf)
                resf.close()
        except CalledModuleError:
            grass.error("EXECUTE: ******** ERROR ********")
        else:
            grass.message(_("EXECUTE: OK"))

        # expected result block ends at the first empty line
        expf = file(expected, 'w')
        while True:
            res = inf.readline().rstrip('\r\n')
            if not res:
                break
            expf.write(res + '\n')
        expf.close()

        if record == 'S':
            if grass.call(['diff', result, expected]) != 0:
                grass.error("RESULT: ******** ERROR ********")
            else:
                grass.message(_("RESULT: OK"))
def main():
    """Extend sys.path and create the WSGI server for the application."""
    path = options['path']
    port = int(options['port'])
    path = os.path.dirname(path)
    app_path = os.path.dirname(path)
    sys.path.append(app_path)

    from wsgiref.simple_server import make_server
    try:
        httpd = make_server('', port, application)
        grass.message("Serving on port %d..." % port)
    # FIX: "except Exception, e" is Python-2-only syntax;
    # "as e" works on Python 2.6+ and Python 3
    except Exception as e:
        grass.error(str(e))
        sys.stdout.flush()
        sys.exit()
    # NOTE(review): httpd is created but serve_forever() is never called
    # here -- confirm the server is started elsewhere, otherwise no
    # request is ever handled
def main():
    """Dispatch the conversion: EPSG code, WKT file, or current location."""
    epsg = options['epsg']
    pathwkt = options['wkt']
    if epsg and pathwkt:
        grass.error('Only one type of conversions can be processed concurrently')

    if epsg:
        epsg2standards(epsg)
    else:
        if pathwkt:
            try:
                # FIX: close the file deterministically via a context manager
                with open(pathwkt, 'r') as io:
                    wkt = io.read().rstrip()
                wkt2standards(wkt)
            except IOError as e:
                # FIX: message previously printed e.errno where the file
                # name belongs; report the actual path
                grass.error('Unable to open file <%s>: %s' % (pathwkt, e.strerror))
        else:
            grassEpsg()
def wkt2standards(prj_txt):
    """Print WKT/proj4/EPSG representations of an ESRI WKT string.

    With flag 's' the identified EPSG code is also stored in the
    PERMANENT mapset.
    """
    srs = osr.SpatialReference()
    srs.ImportFromESRI([prj_txt])
    if flags["w"]:
        print("wkt=%s" % srs.ExportToWkt())
    if flags["p"]:
        print("proj4=%s" % srs.ExportToProj4())
    srs.AutoIdentifyEPSG()
    try:
        # GetAuthorityCode() returns None (TypeError) or a non-numeric
        # string (ValueError) when identification failed
        int(srs.GetAuthorityCode(None))
        epsg = srs.GetAuthorityCode(None)
        print("epsg=%s" % epsg)
        if flags["s"]:
            if isPermanent():
                writeEPSGtoPEMANENT(epsg)
            else:
                grass.warning("Unable to access PERMANENT mapset")
    except (TypeError, ValueError):
        # FIX: narrowed from a bare "except:" that also hid unrelated errors
        grass.error("EPSG code cannot be identified")
def install_extension_xml():
    """Merge metadata of the installed extension into prefix/modules.xml.

    Downloads modules.xml from the remote addons server and updates (or
    creates) the <task> node matching options['extension'] locally.
    """
    # read metadata from remote server
    url = "http://grass.osgeo.org/addons/grass%s/modules.xml" % grass.version()['version'].split('.')[0]
    data = None
    try:
        f = urlopen(url)
        try:
            tree = etree.fromstring(f.read())
        except:
            grass.warning(_("Unable to parse '%s'. Metadata file not updated.") % url)
            return
        for mnode in tree.findall('task'):
            name = mnode.get('name')
            if name != options['extension']:
                continue
            fList = list()
            bnode = mnode.find('binary')
            windows = sys.platform == 'win32'
            if bnode is not None:
                for fnode in bnode.findall('file'):
                    path = fnode.text.split('/')
                    # on Windows executables/scripts carry a suffix
                    if windows:
                        if path[0] == 'bin':
                            path[-1] += '.exe'
                        if path[0] == 'scripts':
                            path[-1] += '.bat'
                    fList.append(os.path.sep.join(path))
            desc, keyw = get_optional_params(mnode)
            data = {
                'name'  : name,
                'desc'  : desc,
                'keyw'  : keyw,
                'files' : fList,
            }
    except HTTPError:
        grass.error(_("Unable to read metadata file from the remote server"))

    if not data:
        grass.warning(_("No metadata available"))
        return

    fXML = os.path.join(options['prefix'], 'modules.xml')
    # create an empty file if not exists
    if not os.path.exists(fXML):
        write_xml_modules(fXML)

    # read XML file
    fo = open(fXML, 'r')
    tree = etree.fromstring(fo.read())
    fo.close()

    # update tree
    tnode = None
    for node in tree.findall('task'):
        if node.get('name') == options['extension']:
            tnode = node
            break

    if tnode is not None:
        # update existing node
        dnode = tnode.find('description')
        if dnode is not None:
            dnode.text = data['desc']
        knode = tnode.find('keywords')
        if knode is not None:
            knode.text = data['keyw']
        # replace the whole <binary> file list
        bnode = tnode.find('binary')
        if bnode is not None:
            tnode.remove(bnode)
        bnode = etree.Element('binary')
        for f in data['files']:
            fnode = etree.Element('file')
            fnode.text = f
            bnode.append(fnode)
        tnode.append(bnode)
    else:
        # create new node for task
        tnode = etree.Element('task', attrib = { 'name' : data['name'] })
        dnode = etree.Element('description')
        dnode.text = data['desc']
        tnode.append(dnode)
        knode = etree.Element('keywords')
        knode.text = data['keyw']
        tnode.append(knode)
        bnode = etree.Element('binary')
        for f in data['files']:
            fnode = etree.Element('file')
            fnode.text = f
            bnode.append(fnode)
        tnode.append(bnode)
        tree.append(tnode)

    write_xml_modules(fXML, tree)
def main():
    """Draw correlation scatterplots for each pair of the given raster maps."""
    layers = options["map"].split(",")

    if len(layers) < 2:
        gcore.error(_("At least 2 maps are required"))

    tmpfile = gcore.tempfile()

    for map in layers:
        if not gcore.find_file(map, element="cell")["file"]:
            gcore.fatal(_("Raster map <%s> not found") % map)

    gcore.write_command("d.text", color="black", size=4, line=1,
                        stdin="CORRELATION")

    os.environ["GRASS_RENDER_FILE_READ"] = "TRUE"

    colors = "red black blue green gray violet".split()
    line = 2
    iloop = 0
    jloop = 0
    for iloop, i in enumerate(layers):
        for jloop, j in enumerate(layers):
            # plot each unordered pair of distinct maps once
            if i != j and iloop <= jloop:
                # rotate through the color list
                color = colors[0]
                colors = colors[1:]
                colors.append(color)
                gcore.write_command("d.text", color=color, size=4, line=line,
                                    stdin="%s %s" % (i, j))
                line += 1

                # FIX: file() is Python-2-only; use open()
                ofile = open(tmpfile, "w")
                gcore.run_command("r.stats", flags="cnA", input=(i, j),
                                  stdout=ofile)
                ofile.close()

                # first pass: determine the value ranges of both maps
                ifile = open(tmpfile, "r")
                first = True
                for l in ifile:
                    f = l.rstrip("\r\n").split(" ")
                    x = float(f[0])
                    y = float(f[1])
                    if first:
                        minx = maxx = x
                        miny = maxy = y
                        first = False
                    if minx > x:
                        minx = x
                    if maxx < x:
                        maxx = x
                    if miny > y:
                        miny = y
                    if maxy < y:
                        maxy = y
                ifile.close()

                # NOTE(review): if r.stats produced no output the min/max
                # variables are unbound here -- confirm inputs always overlap
                kx = 100.0 / (maxx - minx + 1)
                ky = 100.0 / (maxy - miny + 1)

                # second pass: feed the scaled points to d.graph
                p = gcore.feed_command("d.graph", color=color)
                ofile = p.stdin

                ifile = open(tmpfile, "r")
                for l in ifile:
                    f = l.rstrip("\r\n").split(" ")
                    x = float(f[0])
                    y = float(f[1])
                    ofile.write("icon + 0.1 %f %f\n" % ((x - minx + 1) * kx,
                                                        (y - miny + 1) * ky))
                ifile.close()

                ofile.close()
                p.wait()

    try_remove(tmpfile)
def argParser(self, defaultConf, load_records, loadRecurs, setupDB,
              exportRecord, indexes, optimize, harvest, siteOut,
              deleteAll, cswURL, cswXML, force):
    """Translate module options into a pycsw-admin argument list.

    Returns the argv-style list for the first requested command, or
    False when no command was selected.
    """
    if defaultConf is None:
        grass.error('Configure file is not exist')

    args = ['-c']

    if load_records:
        args.extend(['load_records', '-p', load_records])
        if loadRecurs:
            args.append('-r')
        if force:
            args.append('y')
        return args

    if setupDB:
        args.extend(['setup_db', '-f', defaultConf])
        return args

    if exportRecord:
        args.extend(['export_records', '-p', exportRecord, '-f', defaultConf])
        return args

    if indexes:
        args.extend(['rebuild_db_indexes', '-f', defaultConf])
        return args

    if optimize:
        args.extend(['optimize_db', '-f', defaultConf])
        return args

    if harvest:
        args.extend(['refresh_harvested_records', '-f', defaultConf])
        return args

    if siteOut:
        args.extend(['gen_sitemaps', '-o', siteOut, '-f', defaultConf])
        return args

    if deleteAll:
        args.extend(['delete_records', '-f', defaultConf])
        if force:
            args.append('y')
        return args

    if cswURL and cswXML:
        args.extend(['post_xml', '-u', cswURL, '-x', cswXML,
                     '-f', defaultConf])
        return args

    return False
def run(self, argv):
    """Parse getopt-style argv and dispatch to the pycsw admin command."""
    if len(argv) == 0:
        grass.error('Nothing to do. Set args')
        return
    try:
        OPTS, ARGS = getopt.getopt(argv, 'c:f:ho:p:ru:x:s:t:y')
    except getopt.GetoptError as err:
        grass.error('\nERROR: %s' % err)

    # map parsed options onto instance attributes
    for o, a in OPTS:
        if o == '-c':
            self.COMMAND = a
        if o == '-f':
            self.CFG = a
        if o == '-o':
            self.OUTPUT_FILE = a
        if o == '-p':
            self.XML_DIRPATH = a
        if o == '-r':
            self.RECURSIVE = True
        if o == '-u':
            self.CSW_URL = a
        if o == '-x':
            self.XML = a
        if o == '-t':
            self.TIMEOUT = int(a)
        if o == '-y':
            self.FORCE_CONFIRM = True

    if self.CFG is None and self.COMMAND not in ['post_xml']:
        print('ERROR: -f <cfg> is a required argument')

    # every command except post_xml needs repository/server settings
    # from the configuration file
    if self.COMMAND not in ['post_xml']:
        SCP = configparser.SafeConfigParser()
        SCP.readfp(open(self.CFG))
        self.DATABASE = SCP.get('repository', 'database')
        self.URL = SCP.get('server', 'url')
        self.HOME = SCP.get('server', 'home')
        self.METADATA = dict(SCP.items('metadata:main'))
        try:
            self.TABLE = SCP.get('repository', 'table')
        except configparser.NoOptionError:
            # default table name when not configured
            self.TABLE = 'records'

    if self.COMMAND == 'setup_db':
        try:
            self.pycsw_admin.setup_db(self.DATABASE, self.TABLE, self.HOME)
        except Exception as err:
            print(err)
            print('ERROR: DB creation error. '
                  'Database tables already exist')
            print('Delete tables or database to reinitialize')
    elif self.COMMAND == 'load_records':
        self.pycsw_admin.load_records(self.CONTEXT, self.DATABASE,
                                      self.TABLE, self.XML_DIRPATH,
                                      self.RECURSIVE, self.FORCE_CONFIRM)
    elif self.COMMAND == 'export_records':
        self.pycsw_admin.export_records(self.CONTEXT, self.DATABASE,
                                        self.TABLE, self.XML_DIRPATH)
    elif self.COMMAND == 'rebuild_db_indexes':
        self.pycsw_admin.rebuild_db_indexes(self.DATABASE, self.TABLE)
    elif self.COMMAND == 'optimize_db':
        self.pycsw_admin.optimize_db(self.CONTEXT, self.DATABASE, self.TABLE)
    elif self.COMMAND == 'refresh_harvested_records':
        self.pycsw_admin.refresh_harvested_records(self.CONTEXT,
                                                   self.DATABASE, self.TABLE,
                                                   self.URL)
    elif self.COMMAND == 'gen_sitemap':
        self.pycsw_admin.gen_sitemap(self.CONTEXT, self.DATABASE, self.TABLE,
                                     self.URL, self.OUTPUT_FILE)
    elif self.COMMAND == 'post_xml':
        grass.message(
            self.pycsw.core.admin.post_xml(self.CSW_URL, self.XML,
                                           self.TIMEOUT))
    elif self.COMMAND == 'delete_records':
        self.pycsw_admin.delete_records(self.CONTEXT, self.DATABASE,
                                        self.TABLE)
def install_toolbox_xml(url, name):
    """Merge metadata of the installed toolbox <name> into prefix/toolboxes.xml.

    Downloads toolboxes.xml from the remote addons server <url> and
    updates (or creates) the matching <toolbox> node locally.
    """
    # read metadata from remote server (toolboxes)
    url = url + "toolboxes.xml"
    data = dict()
    try:
        f = urlopen(url, proxies=PROXIES)
        tree = etree.fromstring(f.read())
        for tnode in tree.findall('toolbox'):
            clist = list()
            for cnode in tnode.findall('correlate'):
                clist.append(cnode.get('code'))
            mlist = list()
            for mnode in tnode.findall('task'):
                mlist.append(mnode.get('name'))
            code = tnode.get('code')
            data[code] = {
                'name'      : tnode.get('name'),
                'correlate' : clist,
                'modules'   : mlist,
            }
    except HTTPError:
        grass.error(_("Unable to read metadata file from the remote server"))

    if not data:
        grass.warning(_("No metadata available"))
        return
    if name not in data:
        grass.warning(_("No metadata available for <%s>") % name)
        return

    fXML = os.path.join(options['prefix'], 'toolboxes.xml')
    # create an empty file if not exists
    if not os.path.exists(fXML):
        write_xml_modules(fXML)

    # read XML file
    fo = open(fXML, 'r')
    tree = etree.fromstring(fo.read())
    fo.close()

    # update tree
    tnode = None
    for node in tree.findall('toolbox'):
        if node.get('code') == name:
            tnode = node
            break

    tdata = data[name]
    if tnode is not None:
        # update existing node: drop old children, re-added below
        for cnode in tnode.findall('correlate'):
            tnode.remove(cnode)
        for mnode in tnode.findall('task'):
            tnode.remove(mnode)
    else:
        # create new node for task
        tnode = etree.Element('toolbox', attrib = { 'name' : tdata['name'],
                                                    'code' : name })
        tree.append(tnode)

    # (re)attach correlated toolboxes and member modules
    for cname in tdata['correlate']:
        cnode = etree.Element('correlate', attrib = { 'code' : cname })
        tnode.append(cnode)
    for tname in tdata['modules']:
        mnode = etree.Element('task', attrib = { 'name' : tname })
        tnode.append(mnode)

    write_xml_toolboxes(fXML, tree)
def main():
    """Draw correlation graphs for each pair of the given raster maps."""
    layers = options['map'].split(',')

    if len(layers) < 2:
        grass.error(_("At least 2 maps are required"))

    tmpfile = grass.tempfile()

    for map in layers:
        if not grass.find_file(map, element = 'cell')['file']:
            grass.fatal(_("Raster map <%s> not found") % map)

    grass.write_command('d.text', color = 'black', size = 4, line = 1,
                        stdin = "CORRELATION")

    os.environ['GRASS_PNG_READ'] = 'TRUE'

    colors = "red black blue green gray violet".split()
    line = 2
    iloop = 0
    jloop = 0
    for iloop, i in enumerate(layers):
        for jloop, j in enumerate(layers):
            # plot each unordered pair of distinct maps once
            if i != j and iloop <= jloop:
                # rotate through the color list
                color = colors[0]
                colors = colors[1:]
                colors.append(color)
                grass.write_command('d.text', color = color, size = 4,
                                    line = line, stdin = "%s %s" % (i, j))
                line += 1

                # cell-count statistics for the map pair
                ofile = file(tmpfile, 'w')
                grass.run_command('r.stats', flags = 'cnA', input = (i, j),
                                  stdout = ofile)
                ofile.close()

                # first pass: determine the value ranges of both maps
                ifile = file(tmpfile, 'r')
                first = True
                for l in ifile:
                    f = l.rstrip('\r\n').split(' ')
                    x = float(f[0])
                    y = float(f[1])
                    if first:
                        minx = maxx = x
                        miny = maxy = y
                        first = False
                    if minx > x:
                        minx = x
                    if maxx < x:
                        maxx = x
                    if miny > y:
                        miny = y
                    if maxy < y:
                        maxy = y
                ifile.close()

                # scale factors mapping both ranges onto a 0-100 frame
                kx = 100.0 / (maxx - minx + 1)
                ky = 100.0 / (maxy - miny + 1)

                # second pass: feed the scaled points to d.graph
                p = grass.feed_command('d.graph', color = color)
                ofile = p.stdin

                ifile = file(tmpfile, 'r')
                for l in ifile:
                    f = l.rstrip('\r\n').split(' ')
                    x = float(f[0])
                    y = float(f[1])
                    ofile.write("icon + 0.1 %f %f\n" % ((x - minx + 1) * kx,
                                                        (y - miny + 1) * ky))
                ifile.close()

                ofile.close()
                p.wait()

    grass.try_remove(tmpfile)
def install_extension_xml(url, mlist):
    """Update the local modules.xml metadata for the installed modules.

    Returns the list of binary/script names registered for the modules
    in mlist (empty on failure).
    """
    if len(mlist) > 1:
        # read metadata from remote server (toolboxes)
        install_toolbox_xml(url, options['extension'])

    # read metadata from remote server (modules)
    url = url + "modules.xml"
    data = {}
    bList = []
    try:
        f = urlopen(url, proxies=PROXIES)
        try:
            tree = etree.fromstring(f.read())
        except:
            grass.warning(_("Unable to parse '%s'. Metadata file not updated.") % url)
            return bList
        for mnode in tree.findall('task'):
            name = mnode.get('name')
            if name not in mlist:
                continue
            fList = list()
            bnode = mnode.find('binary')
            windows = sys.platform == 'win32'
            if bnode is not None:
                for fnode in bnode.findall('file'):
                    path = fnode.text.split('/')
                    # record the plain name; on Windows the stored path
                    # gets the platform suffix
                    if path[0] == 'bin':
                        bList.append(path[-1])
                        if windows:
                            path[-1] += '.exe'
                    elif path[0] == 'scripts':
                        bList.append(path[-1])
                        if windows:
                            path[-1] += '.py'
                    fList.append(os.path.sep.join(path))
            desc, keyw = get_optional_params(mnode)
            data[name] = {
                'desc'  : desc,
                'keyw'  : keyw,
                'files' : fList,
            }
    except:
        grass.error(_("Unable to read metadata file from the remote server"))

    if not data:
        grass.warning(_("No metadata available"))
        return []

    fXML = os.path.join(options['prefix'], 'modules.xml')
    # create an empty file if not exists
    if not os.path.exists(fXML):
        write_xml_modules(fXML)

    # read XML file
    fo = open(fXML, 'r')
    tree = etree.fromstring(fo.read())
    fo.close()

    # update tree
    for name in mlist:
        tnode = None
        for node in tree.findall('task'):
            if node.get('name') == name:
                tnode = node
                break

        if name not in data:
            grass.warning(_("No metadata found for <%s>") % name)
            continue

        ndata = data[name]
        if tnode is not None:
            # update existing node
            dnode = tnode.find('description')
            if dnode is not None:
                dnode.text = ndata['desc']
            knode = tnode.find('keywords')
            if knode is not None:
                knode.text = ndata['keyw']
            # replace the whole <binary> file list
            bnode = tnode.find('binary')
            if bnode is not None:
                tnode.remove(bnode)
            bnode = etree.Element('binary')
            for f in ndata['files']:
                fnode = etree.Element('file')
                fnode.text = f
                bnode.append(fnode)
            tnode.append(bnode)
        else:
            # create new node for task
            tnode = etree.Element('task', attrib = { 'name' : name })
            dnode = etree.Element('description')
            dnode.text = ndata['desc']
            tnode.append(dnode)
            knode = etree.Element('keywords')
            knode.text = ndata['keyw']
            tnode.append(knode)
            bnode = etree.Element('binary')
            for f in ndata['files']:
                fnode = etree.Element('file')
                fnode.text = f
                bnode.append(fnode)
            tnode.append(bnode)
            tree.append(tnode)

    write_xml_modules(fXML, tree)
    return bList
def _render(self, cmd, env):
    """Execute one render command; propagate its return code, 1 on error."""
    module = cmd[0]
    params = cmd[1]
    try:
        status = grass.run_command(module, env=env, **params)
    except CalledModuleError as e:
        grass.error(e)
        return 1
    return status
def run(self, argv):
    """Parse getopt-style argv and dispatch to the pycsw admin command."""
    if len(argv) == 0:
        grass.error("Nothing to do. Set args")
        return
    try:
        OPTS, ARGS = getopt.getopt(argv, "c:f:ho:p:ru:x:s:t:y")
    except getopt.GetoptError as err:
        grass.error("\nERROR: %s" % err)

    # map parsed options onto instance attributes
    for o, a in OPTS:
        if o == "-c":
            self.COMMAND = a
        if o == "-f":
            self.CFG = a
        if o == "-o":
            self.OUTPUT_FILE = a
        if o == "-p":
            self.XML_DIRPATH = a
        if o == "-r":
            self.RECURSIVE = True
        if o == "-u":
            self.CSW_URL = a
        if o == "-x":
            self.XML = a
        if o == "-t":
            self.TIMEOUT = int(a)
        if o == "-y":
            self.FORCE_CONFIRM = True

    if self.CFG is None and self.COMMAND not in ["post_xml"]:
        print("ERROR: -f <cfg> is a required argument")

    # every command except post_xml needs repository/server settings
    # from the configuration file
    if self.COMMAND not in ["post_xml"]:
        SCP = configparser.SafeConfigParser()
        SCP.readfp(open(self.CFG))
        self.DATABASE = SCP.get("repository", "database")
        self.URL = SCP.get("server", "url")
        self.HOME = SCP.get("server", "home")
        self.METADATA = dict(SCP.items("metadata:main"))
        try:
            self.TABLE = SCP.get("repository", "table")
        except configparser.NoOptionError:
            # default table name when not configured
            self.TABLE = "records"

    if self.COMMAND == "setup_db":
        try:
            self.pycsw_admin.setup_db(self.DATABASE, self.TABLE, self.HOME)
        except Exception as err:
            print(err)
            print("ERROR: DB creation error. "
                  "Database tables already exist")
            print("Delete tables or database to reinitialize")
    elif self.COMMAND == "load_records":
        self.pycsw_admin.load_records(
            self.CONTEXT,
            self.DATABASE,
            self.TABLE,
            self.XML_DIRPATH,
            self.RECURSIVE,
            self.FORCE_CONFIRM,
        )
    elif self.COMMAND == "export_records":
        self.pycsw_admin.export_records(self.CONTEXT, self.DATABASE,
                                        self.TABLE, self.XML_DIRPATH)
    elif self.COMMAND == "rebuild_db_indexes":
        self.pycsw_admin.rebuild_db_indexes(self.DATABASE, self.TABLE)
    elif self.COMMAND == "optimize_db":
        self.pycsw_admin.optimize_db(self.CONTEXT, self.DATABASE, self.TABLE)
    elif self.COMMAND == "refresh_harvested_records":
        self.pycsw_admin.refresh_harvested_records(self.CONTEXT,
                                                   self.DATABASE, self.TABLE,
                                                   self.URL)
    elif self.COMMAND == "gen_sitemap":
        self.pycsw_admin.gen_sitemap(self.CONTEXT, self.DATABASE, self.TABLE,
                                     self.URL, self.OUTPUT_FILE)
    elif self.COMMAND == "post_xml":
        grass.message(
            self.pycsw.core.admin.post_xml(self.CSW_URL, self.XML,
                                           self.TIMEOUT))
    elif self.COMMAND == "delete_records":
        self.pycsw_admin.delete_records(self.CONTEXT, self.DATABASE,
                                        self.TABLE)
flag = 1


def _get_csw(catalog_url, timeout=10):
    """function to init owslib.csw.CatalogueServiceWeb

    Returns the catalog object, or False (after reporting the error)
    when the connection failed.
    """
    # connect to the server
    try:
        catalog = CatalogueServiceWeb(catalog_url, timeout=timeout)
        return catalog
    # FIX: "except E, err" is Python-2-only syntax; "as err" works on
    # Python 2.6+ and Python 3
    except ExceptionReport as err:
        msg = 'Error connecting to service: %s' % err
    except ValueError as err:
        msg = 'Value Error: %s' % err
    except Exception as err:
        msg = 'Unknown Error: %s' % err
    grass.error('CSW Connection error: %s' % msg)
    return False


def main():
    """Harvest records from the source catalog into the destination."""
    # abort early when the source CSW endpoint is unreachable
    if not _get_csw(options['source']):
        return
    harvest(options['source'], options['destination'])


if __name__ == "__main__":
    options, flags = grass.parser()
    main()
def argParser(
    self,
    defaultConf,
    load_records,
    loadRecurs,
    setupDB,
    exportRecord,
    indexes,
    optimize,
    harvest,
    siteOut,
    deleteAll,
    cswURL,
    cswXML,
    force,
):
    """Build the pycsw-admin argument vector for the requested command.

    Returns the argument list for the first requested command, or False
    when no command was selected.
    """
    if defaultConf is None:
        grass.error("Configure file is not exist")

    args = ["-c"]

    if load_records:
        args += ["load_records", "-p", load_records]
        if loadRecurs:
            args.append("-r")
        if force:
            args.append("y")
        return args
    if setupDB:
        return args + ["setup_db", "-f", defaultConf]
    if exportRecord:
        return args + ["export_records", "-p", exportRecord, "-f", defaultConf]
    if indexes:
        return args + ["rebuild_db_indexes", "-f", defaultConf]
    if optimize:
        return args + ["optimize_db", "-f", defaultConf]
    if harvest:
        return args + ["refresh_harvested_records", "-f", defaultConf]
    if siteOut:
        return args + ["gen_sitemaps", "-o", siteOut, "-f", defaultConf]
    if deleteAll:
        args += ["delete_records", "-f", defaultConf]
        if force:
            args.append("y")
        return args
    if cswURL and cswXML:
        return args + ["post_xml", "-u", cswURL, "-x", cswXML,
                       "-f", defaultConf]
    return False