def main():
    """Export a GRASS attribute table through v.out.ogr (old ``dsn`` API).

    Reads the parser-supplied ``options``: input vector, layer, OGR
    format, OGR datasource name (dsn) and table.  DBF output is routed
    through the ESRI Shapefile driver; the geometry companion files are
    removed afterwards so only the .dbf attribute table remains.
    """
    input = options['input']
    layer = options['layer']
    format = options['format']
    dsn = options['dsn']
    table = options['table']

    # DBF has no own OGR writer here: use the Shapefile driver and
    # delete the geometry files afterwards.
    if format.lower() == 'dbf':
        format = "ESRI_Shapefile"

    # CSV needs an explicit output layer name derived from the dsn.
    if format.lower() == 'csv':
        olayer = basename(dsn, 'csv')
    else:
        olayer = None

    #is there a simpler way of testing for --overwrite?
    dbffile = input + '.dbf'
    if os.path.exists(dbffile) and not grass.overwrite():
        grass.fatal(_("File <%s> already exists") % dbffile)

    # NOTE(review): this variant uses the old integer-returning
    # run_command API — non-zero means the module failed.
    if olayer:
        if grass.run_command('v.out.ogr', quiet=True, input=input,
                             layer=layer, dsn=dsn, format=format,
                             type='point,line,area', olayer=olayer) != 0:
            sys.exit(1)
    else:
        if grass.run_command('v.out.ogr', quiet=True, input=input,
                             layer=layer, dsn=dsn, format=format,
                             type='point,line,area') != 0:
            sys.exit(1)

    if format == "ESRI_Shapefile":
        # drop the geometry companions, keep only the .dbf table
        exts = ['shp', 'shx', 'prj']
        if dsn.endswith('.dbf'):
            outname = basename(dsn, 'dbf')
            for ext in exts:
                try_remove("%s.%s" % (outname, ext))
            outname += '.dbf'
        else:
            for ext in exts:
                try_remove(os.path.join(dsn, "%s.%s" % (input, ext)))
            outname = os.path.join(dsn, input + ".dbf")
    elif format.lower() == 'csv':
        outname = dsn + '.csv'
    else:
        outname = input

    grass.message(_("Exported table <%s>") % outname)
def main():
    """Export a GRASS attribute table through v.out.ogr (``output`` API).

    Same logic as the older ``dsn`` variant, but module failures are
    reported through CalledModuleError instead of integer exit codes.
    DBF output goes through the ESRI Shapefile driver; the geometry
    companion files are removed afterwards so only the .dbf remains.
    """
    input = options['input']
    layer = options['layer']
    format = options['format']
    output = options['output']
    table = options['table']

    # DBF has no own OGR writer here: use the Shapefile driver and
    # delete the geometry files afterwards.
    if format.lower() == 'dbf':
        format = "ESRI_Shapefile"

    # CSV needs an explicit output layer name derived from the output name.
    if format.lower() == 'csv':
        olayer = basename(output, 'csv')
    else:
        olayer = None

    #is there a simpler way of testing for --overwrite?
    dbffile = input + '.dbf'
    if os.path.exists(dbffile) and not grass.overwrite():
        grass.fatal(_("File <%s> already exists") % dbffile)

    if olayer:
        try:
            grass.run_command('v.out.ogr', quiet=True, input=input,
                              layer=layer, output=output, format=format,
                              type='point,line,area', olayer=olayer)
        except CalledModuleError:
            grass.fatal(_("Module <%s> failed") % 'v.out.ogr')
    else:
        try:
            grass.run_command('v.out.ogr', quiet=True, input=input,
                              layer=layer, output=output, format=format,
                              type='point,line,area')
        except CalledModuleError:
            grass.fatal(_("Module <%s> failed") % 'v.out.ogr')

    if format == "ESRI_Shapefile":
        # drop the geometry companions, keep only the .dbf table
        exts = ['shp', 'shx', 'prj']
        if output.endswith('.dbf'):
            outname = basename(output, 'dbf')
            for ext in exts:
                try_remove("%s.%s" % (outname, ext))
            outname += '.dbf'
        else:
            for ext in exts:
                try_remove(os.path.join(output, "%s.%s" % (input, ext)))
            outname = os.path.join(output, input + ".dbf")
    elif format.lower() == 'csv':
        outname = output + '.csv'
    else:
        outname = input

    grass.message(_("Exported table <%s>") % outname)
def main():
    """Export a GRASS attribute table through v.out.ogr (``gcore`` variant).

    Identical flow to the ``grass``-namespaced variant: DBF output is
    routed through the ESRI Shapefile driver and the geometry companion
    files are removed afterwards so only the .dbf attribute table
    remains.  Module failures raise CalledModuleError.
    """
    input = options['input']
    layer = options['layer']
    format = options['format']
    output = options['output']
    table = options['table']

    # DBF has no own OGR writer here: use the Shapefile driver and
    # delete the geometry files afterwards.
    if format.lower() == 'dbf':
        format = "ESRI_Shapefile"

    # CSV needs an explicit output layer name derived from the output name.
    if format.lower() == 'csv':
        olayer = basename(output, 'csv')
    else:
        olayer = None

    # is there a simpler way of testing for --overwrite?
    dbffile = input + '.dbf'
    if os.path.exists(dbffile) and not gcore.overwrite():
        gcore.fatal(_("File <%s> already exists") % dbffile)

    if olayer:
        try:
            gcore.run_command('v.out.ogr', quiet=True, input=input,
                              layer=layer, output=output, format=format,
                              type='point,line,area', olayer=olayer)
        except CalledModuleError:
            gcore.fatal(_("Module <%s> failed") % 'v.out.ogr')
    else:
        try:
            gcore.run_command('v.out.ogr', quiet=True, input=input,
                              layer=layer, output=output, format=format,
                              type='point,line,area')
        except CalledModuleError:
            gcore.fatal(_("Module <%s> failed") % 'v.out.ogr')

    if format == "ESRI_Shapefile":
        # drop the geometry companions, keep only the .dbf table
        exts = ['shp', 'shx', 'prj']
        if output.endswith('.dbf'):
            outname = basename(output, 'dbf')
            for ext in exts:
                try_remove("%s.%s" % (outname, ext))
            outname += '.dbf'
        else:
            for ext in exts:
                try_remove(os.path.join(output, "%s.%s" % (input, ext)))
            outname = os.path.join(output, input + ".dbf")
    elif format.lower() == 'csv':
        outname = output + '.csv'
    else:
        outname = input

    gcore.message(_("Exported table <%s>") % outname)
def main():
    """Open the requested manual page via man(1) or the HTML browser.

    Sets the module-level ``gisbase``, ``browser`` and ``browser_name``
    globals that the start_* helpers rely on, then dispatches either a
    special page (index/topics) or the named entry.
    """
    global gisbase, browser, browser_name

    if flags['i'] and flags['t']:
        grass.fatal(_("Flags -%c and -%c are mutually exclusive") % ('i', 't'))

    # Special pages take precedence over a named entry.
    if flags['i']:
        special = 'index'
    elif flags['t']:
        special = 'topics'
    else:
        special = None

    start = start_man if flags['m'] else start_browser

    entry = options['entry']
    gisbase = os.environ['GISBASE']
    browser = os.getenv('GRASS_HTML_BROWSER', '')

    if sys.platform == 'darwin':
        # hack for MacOSX: name is the third dotted component of the env var
        browser_name = os.getenv('GRASS_HTML_BROWSER_MACOSX', '..').split('.')[2]
    elif sys.platform == 'cygwin':
        # hack for Cygwin: strip the .exe suffix
        browser_name = basename(browser, 'exe')
    else:
        browser_name = basename(browser)

    # keep order!
    # first test for index...
    start(special if special else entry)
    return 0
def main():
    """Open the requested manual page via man(1) or the HTML browser.

    Populates the module-level ``gisbase``, ``browser`` and
    ``browser_name`` globals used by the start_* helpers, then launches
    either a special page (index/topics) or the named entry.
    """
    global gisbase, browser, browser_name

    if flags['i'] and flags['t']:
        grass.fatal(_("Flags -%c and -%c are mutually exclusive") % ('i', 't'))

    # Special pages take precedence over a named entry.
    if flags['i']:
        special = 'index'
    elif flags['t']:
        special = 'topics'
    else:
        special = None

    start = start_man if flags['m'] else start_browser

    entry = options['entry']
    gisbase = os.environ['GISBASE']
    browser = os.getenv('GRASS_HTML_BROWSER', '')

    if sys.platform == 'darwin':
        # hack for MacOSX: name is the third dotted component of the env var
        browser_name = os.getenv('GRASS_HTML_BROWSER_MACOSX', '..').split('.')[2]
    elif sys.platform == 'cygwin':
        # hack for Cygwin: strip the .exe suffix
        browser_name = basename(browser, 'exe')
    else:
        browser_name = basename(browser)

    # keep order!
    # first test for index...
    start(special if special else entry)
    return 0
def main():
    """Open the requested manual page via man(1) or the HTML browser.

    Populates the module-level ``gisbase``, ``browser`` and
    ``browser_name`` globals used by the start_* helpers, then launches
    either a special page (index/topics) or the named entry.
    """
    global gisbase, browser, browser_name

    if flags["i"] and flags["t"]:
        grass.fatal(_("Flags -%c and -%c are mutually exclusive") % ("i", "t"))

    # Special pages take precedence over a named entry.
    if flags["i"]:
        special = "index"
    elif flags["t"]:
        special = "topics"
    else:
        special = None

    start = start_man if flags["m"] else start_browser

    entry = options["entry"]
    gisbase = os.environ["GISBASE"]
    browser = os.getenv("GRASS_HTML_BROWSER", "")

    if sys.platform == "darwin":
        # hack for MacOSX: name is the third dotted component of the env var
        browser_name = os.getenv("GRASS_HTML_BROWSER_MACOSX", "..").split(".")[2]
    elif sys.platform == "cygwin":
        # hack for Cygwin: strip the .exe suffix
        browser_name = basename(browser, "exe")
    else:
        browser_name = basename(browser)

    # keep order!
    # first test for index...
    start(special if special else entry)
    return 0
def main():
    """Import bookmarks, images, notes and tracks from a Geopaparazzi
    sqlite database into GRASS vector maps.

    If the current location is not lat/long, a temporary EPSG:4326
    location is created on the fly (by rewriting GISRC), the data is
    imported there, and the maps are reprojected back with v.proj.

    Fixes over the previous revision:
      * the notes reprojection loop used ``prefix + '_node_' + cat``
        while the import created ``prefix + '_notes_' + cat``, so the
        reprojection always referenced non-existent maps;
      * the "Vector %s not imported" warning never supplied the map
        name for its %s placeholder;
      * the track-import error message interpolated inside _() instead
        of after it, breaking message translation.
    """
    indb = options['database']
    prefix = options['basename']
    env = grass.gisenv()
    # fix sqlite3 db field string multibyte character problem
    # NOTE(review): sys.setdefaultencoding() is normally removed by the
    # site module at startup (Python 2) — this only works if sys was
    # reloaded earlier in the module; confirm against the file header.
    sys.setdefaultencoding('utf-8')
    # check if 3d or not
    if flags['z']:
        d3 = 'z'
    else:
        d3 = ''
    owrite = grass.overwrite()
    # check if location it is latlong
    if grass.locn_is_latlong():
        locn = True
    else:
        locn = False
    # connection to sqlite geopaparazzi database
    import sqlite3
    conn = sqlite3.connect(indb)
    curs = conn.cursor()
    # if it is not a latlong location create a latlong location on the fly
    if not locn:
        # create new location and move to it creating new gisrc file
        new_loc = basename(grass.tempfile(create=False))
        new_loc_name = 'geopaparazzi_%s' % new_loc
        grass.create_location(dbase=env['GISDBASE'], epsg='4326',
                              location=new_loc_name,
                              desc='Temporary location for v.in.geopaparazzi')
        grc = os.getenv('GISRC')
        shutil.copyfile(grc, grc + '.old')
        newrc = open(grc, 'w')
        newrc.write('GISDBASE: %s\n' % env['GISDBASE'])
        newrc.write('LOCATION_NAME: %s\n' % new_loc_name)
        newrc.write('MAPSET: PERMANENT\n')
        newrc.write('GRASS_GUI: text\n')
        newrc.close()
        grass.run_command('db.connect', flags="d", quiet=True)

    # load bookmarks
    if flags['b']:
        # check if elements in bookmarks table are more the 0
        if checkEle(curs, 'bookmarks') != 0:
            bookname = prefix + '_book'
            pois = importGeom(bookname, 'bookmarks', curs, owrite, '')
            sql = 'CREATE TABLE %s (cat int, text text)' % bookname
            grass.write_command('db.execute', input='-', stdin=sql)
            # select attributes
            sql = "select text from bookmarks order by _id"
            allattri = returnClear(curs, sql)
            # add values using insert statement
            idcat = 1
            for row in allattri:
                values = "%d,'%s'" % (idcat, str(row))
                sql = "insert into %s values(%s)" % (bookname, values)
                grass.write_command('db.execute', input='-', stdin=sql)
                idcat += 1
            # at the end connect table to vector
            grass.run_command('v.db.connect', map=bookname,
                              table=bookname, quiet=True)
        else:
            grass.warning(_("No bookmarks found, escape them"))

    # load images
    if flags['i']:
        # check if elements in images table are more the 0
        if checkEle(curs, 'images') != 0:
            imagename = prefix + '_image'
            pois = importGeom(imagename, 'images', curs, owrite, d3)
            sql = 'CREATE TABLE %s (cat int, azim int, ' % imagename
            sql += 'path text, ts text, text text)'
            grass.write_command('db.execute', input='-', stdin=sql)
            # select attributes
            sql = "select azim, path, ts, text from images order by _id"
            allattri = returnAll(curs, sql)
            # add values using insert statement
            idcat = 1
            for row in allattri:
                values = "%d,'%d','%s','%s','%s'" % (idcat, row[0],
                                                     str(row[1]), str(row[2]),
                                                     str(row[3]))
                sql = "insert into %s values(%s)" % (imagename, values)
                grass.write_command('db.execute', input='-', stdin=sql)
                idcat += 1
            # at the end connect table to vector
            grass.run_command('v.db.connect', map=imagename,
                              table=imagename, quiet=True)
        else:
            grass.warning(_("No images found, escape them"))

    # load notes
    if flags['n']:
        # check if elements in notes table are more the 0
        if checkEle(curs, 'notes') != 0:
            # select each categories
            categories = returnClear(curs, "select cat from notes group by cat")
            # for each category
            for cat in categories:
                # select lat, lon for create point layer
                catname = prefix + '_notes_' + cat
                pois = importGeom(catname, 'notes', curs, owrite, d3, cat)
                # select form to understand the number
                forms = returnClear(curs, "select _id from notes where cat = '%s' "
                                          "and form is not null order by _id" % cat)
                # if number of form is different from 0 and number of point
                # remove the vector because some form it is different
                if len(forms) != 0 and len(forms) != len(pois):
                    grass.run_command('g.remove', flags='f', type='vector',
                                      name=catname, quiet=True)
                    # FIX: supply the map name for the %s placeholder
                    grass.warning(_("Vector %s not imported because number"
                                    " of points and form is different") % catname)
                # if form it's 0 there is no form
                elif len(forms) == 0:
                    # create table without form
                    sql = 'CREATE TABLE %s (cat int, ts text, ' % catname
                    sql += 'text text, geopap_cat text)'
                    grass.write_command('db.execute', input='-', stdin=sql)
                    # select attributes
                    sql = "select ts, text, cat from notes where "\
                          "cat='%s' order by _id" % cat
                    allattri = returnAll(curs, sql)
                    # add values using insert statement
                    idcat = 1
                    for row in allattri:
                        values = "%d,'%s','%s','%s'" % (idcat, str(row[0]),
                                                        str(row[1]), str(row[2]))
                        sql = "insert into %s values(%s)" % (catname, values)
                        grass.write_command('db.execute', input='-', stdin=sql)
                        idcat += 1
                    # at the end connect table to vector
                    grass.run_command('v.db.connect', map=catname,
                                      table=catname, quiet=True)
                # create table with form
                else:
                    # select all the attribute
                    sql = "select ts, text, cat, form from notes where "\
                          "cat='%s' order by _id" % cat
                    allattri = returnAll(curs, sql)
                    # return string of form's categories too create table
                    keys = returnFormKeys(allattri)
                    sql = 'CREATE TABLE %s (cat int, ts text, ' % catname
                    sql += 'text text, geopap_cat text %s)' % keys
                    grass.write_command('db.execute', input='-', stdin=sql)
                    # it's for the number of categories
                    idcat = 1
                    # for each feature insert value
                    for row in allattri:
                        values = "%d,'%s','%s','%s'," % (idcat, str(row[0]),
                                                         str(row[1]), str(row[2]))
                        values += returnFormValues(row[3])
                        sql = "insert into %s values(%s)" % (catname, values)
                        grass.write_command('db.execute', input='-', stdin=sql)
                        idcat += 1
                    # at the end connect table with vector
                    grass.run_command('v.db.connect', map=catname,
                                      table=catname, quiet=True)
        else:
            grass.warning(_("No notes found, escape them"))

    # load tracks
    if flags['t']:
        # check if elements in gpslogs table are more the 0
        if checkEle(curs, 'gpslogs') != 0:
            tracksname = prefix + '_tracks'
            # define string for insert data at the end
            tracks = ''
            # return ids of tracks
            ids = returnClear(curs, "select _id from gpslogs")
            # for each track
            for i in ids:
                # select all the points coordinates
                tsel = "select lon, lat"
                if flags['z']:
                    tsel += ", altim"
                tsel += " from gpslog_data where logid=%s order by _id" % i
                trackpoints = returnAll(curs, tsel)
                wpoi = '\n'.join(['|'.join([str(col) for col in row])
                                  for row in trackpoints])
                tracks += "%s\n" % wpoi
                # NaN row separates individual lines for v.in.lines
                if flags['z']:
                    tracks += 'NaN|NaN|Nan\n'
                else:
                    tracks += 'NaN|Nan\n'
            # import lines
            try:
                grass.write_command('v.in.lines', flags=d3, input='-',
                                    out=tracksname, stdin=tracks,
                                    overwrite=owrite, quiet=True)
            except CalledModuleError:
                # FIX: interpolate after _() so the message stays translatable
                grass.fatal(_("Error importing %s") % tracksname)
            # create table for line
            sql = 'CREATE TABLE %s (cat int, startts text, ' % tracksname
            sql += 'endts text, text text, color text, width int)'
            grass.write_command('db.execute', input='-', stdin=sql)
            sql = "select logid, startts, endts, text, color, width from" \
                  " gpslogs, gpslogsproperties where gpslogs._id=" \
                  "gpslogsproperties.logid"
            # return attributes
            allattri = returnAll(curs, sql)
            # for each line insert attribute
            for row in allattri:
                values = "%d,'%s','%s','%s','%s',%d" % (row[0], str(row[1]),
                                                        str(row[2]), str(row[3]),
                                                        str(row[4]), row[5])
                sql = "insert into %s values(%s)" % (tracksname, values)
                grass.write_command('db.execute', input='-', stdin=sql)
            # at the end connect map with table
            grass.run_command('v.db.connect', map=tracksname,
                              table=tracksname, quiet=True)
        else:
            grass.warning(_("No tracks found, escape them"))

    # if location it's not latlong reproject it
    if not locn:
        # copy restore the original location
        shutil.copyfile(grc + '.old', grc)
        # reproject bookmarks
        if flags['b'] and checkEle(curs, 'bookmarks') != 0:
            grass.run_command('v.proj', quiet=True, input=bookname,
                              location='geopaparazzi_%s' % new_loc,
                              mapset='PERMANENT')
        # reproject images
        if flags['i'] and checkEle(curs, 'images') != 0:
            grass.run_command('v.proj', quiet=True, input=imagename,
                              location='geopaparazzi_%s' % new_loc,
                              mapset='PERMANENT')
        # reproject notes
        if flags['n'] and checkEle(curs, 'notes') != 0:
            for cat in categories:
                # FIX: import above created '_notes_' maps, not '_node_'
                catname = prefix + '_notes_' + cat
                grass.run_command('v.proj', quiet=True, input=catname,
                                  location='geopaparazzi_%s' % new_loc,
                                  mapset='PERMANENT')
        # reproject track
        if flags['t'] and checkEle(curs, 'gpslogs') != 0:
            grass.run_command('v.proj', quiet=True, input=tracksname,
                              location='geopaparazzi_%s' % new_loc,
                              mapset='PERMANENT')
def main():
    """Import an ESRI E00 archive into a GRASS vector map.

    Uses the external avcimport/e00conv tools to turn the (possibly
    split and/or compressed) E00 file into an Arc Coverage inside a
    temporary directory, then imports the requested feature type with
    v.in.ogr and removes the temporary files.

    Fix over the previous revision: the compressed-ASCII branch called
    ``try_remove(info)`` on a name ``info`` that was never defined,
    raising NameError whenever a compressed E00 file was imported.
    """
    filename = options['file']
    type = options['type']
    vect = options['vect']
    e00tmp = str(os.getpid())

    #### check for avcimport
    if not grass.find_program('avcimport'):
        grass.fatal(_("'avcimport' program not found, install it first") +
                    "\n" + "http://avce00.maptools.org")

    #### check for e00conv
    if not grass.find_program('e00conv'):
        grass.fatal(_("'e00conv' program not found, install it first") +
                    "\n" + "http://avce00.maptools.org")

    # check that the user didn't use all three, which gets past the parser.
    if type not in ['point', 'line', 'area']:
        grass.fatal(_('Must specify one of "point", "line", or "area".'))

    e00name = basename(filename, 'e00')
    # avcimport only accepts 13 chars:
    e00shortname = e00name[:13]

    #check if this is a split E00 file (.e01, .e02 ...):
    merging = False
    if os.path.exists(e00name + '.e01') or os.path.exists(e00name + '.E01'):
        grass.message(_("Found that E00 file is split into pieces (.e01, ...). Merging..."))
        merging = True

    if vect:
        name = vect
    else:
        name = e00name

    ### do import

    #make a temporary directory
    tmpdir = grass.tempfile()
    try_remove(tmpdir)
    os.mkdir(tmpdir)

    files = glob.glob(e00name + '.e[0-9][0-9]') + glob.glob(e00name + '.E[0-9][0-9]')
    for f in files:
        shutil.copy(f, tmpdir)

    #change to temporary directory to later avoid removal problems (rm -r ...)
    os.chdir(tmpdir)

    #check for binay E00 file (we can just check if import fails):
    #avcimport doesn't set exist status :-(
    if merging:
        files.sort()
        filename = "%s.cat.%s.e00" % (e00name, e00tmp)
        outf = file(filename, 'wb')
        for f in files:
            inf = file(f, 'rb')
            shutil.copyfileobj(inf, outf)
            inf.close()
        outf.close()

    nuldev = file(os.devnull, 'w+')

    grass.message(_("An error may appear next which will be ignored..."))
    if grass.call(['avcimport', filename, e00shortname],
                  stdout=nuldev, stderr=nuldev) == 1:
        grass.message(_("E00 ASCII found and converted to Arc Coverage in current directory"))
    else:
        grass.message(_("E00 Compressed ASCII found. Will uncompress first..."))
        try_remove(e00shortname)
        # FIX: removed ``try_remove(info)`` — 'info' was never defined
        # and raised NameError on this branch.
        grass.call(['e00conv', filename, e00tmp + '.e00'])
        grass.message(_("...converted to Arc Coverage in current directory"))
        grass.call(['avcimport', e00tmp + '.e00', e00shortname], stderr=nuldev)

    #SQL name fix:
    name = name.replace('-', '_')

    ## let's import...
    grass.message(_("Importing %ss...") % type)

    layer = dict(point='LAB', line='ARC', area=['LAB', 'ARC'])
    itype = dict(point='point', line='line', area='centroid')

    if grass.run_command('v.in.ogr', flags='o', dsn=e00shortname,
                         layer=layer[type], type=itype[type],
                         output=name) != 0:
        grass.fatal(_("An error occurred while running v.in.ogr"))

    grass.message(_("Imported <%s> vector map <%s>.") % (type, name))

    #### clean up the mess
    for root, dirs, files in os.walk('.', False):
        for f in files:
            path = os.path.join(root, f)
            try_remove(path)
        for d in dirs:
            path = os.path.join(root, d)
            try_rmdir(path)
    os.chdir('..')
    os.rmdir(tmpdir)

    #### end
    grass.message(_("Done."))

    # write cmd history:
    grass.vector_history(name)
def main():
    """Import a GEOnet Names Server (GNS) tab-separated place-name file
    as a GRASS vector point map.

    Converts the known columns to a semicolon-separated temporary file
    (empty integer fields become 0) and feeds it to v.in.ascii.
    Only works in a LatLong/WGS84 location.

    Fixes over the previous revision:
      * the final history call used ``vgrass.vector_history`` — the name
        ``vgrass`` does not exist in this variant (every other call uses
        ``grass.``), so the module crashed after a successful import;
      * the temporary file was opened in binary mode ("wb") while the
        code writes ``str`` lines, a TypeError on Python 3.
    """
    fileorig = options["input"]
    filevect = options["output"]
    if not filevect:
        # default output name: input file name without its .txt suffix
        filevect = basename(fileorig, "txt")

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags="j")
    kv = parse_key_val(s)
    if kv["+proj"] != "longlat":
        grass.fatal(_("This module only operates in LatLong/WGS84 locations"))

    #### setup temporary file
    tmpfile = grass.tempfile()

    # (GNS column name, SQL column definition) in file order
    coldescs = [
        ("RC", "rc integer"),
        ("UFI", "uf1 integer"),
        ("UNI", "uni integer"),
        ("LAT", "lat double precision"),
        ("LONG", "lon double precision"),
        ("DMS_LAT", "dms_lat integer"),
        ("DMS_LONG", "dms_long integer"),
        ("MGRS", "mgrs varchar(15)"),
        ("JOG", "jog varchar(7)"),
        ("FC", "fc varchar(1)"),
        ("DSG", "dsg varchar(6)"),
        ("PC", "pc integer"),
        ("CC1", "cci varchar(255)"),
        ("ADM1", "adm1 varchar(2)"),
        ("POP", "pop integer"),
        ("ELEV", "elev double precision"),
        ("CC2", "cc2 varchar(255)"),
        ("NT", "nt varchar(2)"),
        ("LC", "lc varchar(3)"),
        ("SHORT_FORM", "shortform varchar(128)"),
        ("GENERIC", "generic varchar(128)"),
        ("SORT_NAME_RO", "sortnamero varchar(255)"),
        ("FULL_NAME_RO", "fullnamero varchar(255)"),
        ("FULL_NAME_ND_RO", "funamesdro varchar(255)"),
        ("SORT_NAME_RG", "sortnamerg varchar(255)"),
        ("FULL_NAME_RG", "fullnamerg varchar(255)"),
        ("FULL_NAME_ND_RG", "funamesdrg varchar(255)"),
        ("NOTE", "note varchar(4000)"),
        ("MODIFY_DATE", "mod_date date"),
        ("DISPLAY", "display varchar(255)"),
        ("NAME_RANK", "namerank integer"),
        ("NAME_LINK", "namelink integer"),
        ("TRANSL_CD", "translcd varchar(32)"),
        ("NM_MODIFY_DATE", "nmmodifydate varchar(10)"),
    ]
    colnames = [desc[0] for desc in coldescs]
    # True for integer columns — they need "0" instead of an empty value
    coltypes = dict([(desc[0], "integer" in desc[1]) for desc in coldescs])

    header = None
    num_places = 0
    inf = open(fileorig)
    # FIX: open in text mode — str lines are written below
    outf = open(tmpfile, "w")
    for line in inf:
        fields = line.rstrip("\r\n").split("\t")
        if not header:
            # first line of the GNS file carries the column names
            header = fields
            continue
        vars = dict(zip(header, fields))
        fields2 = []
        for col in colnames:
            if col in vars:
                if coltypes[col] and vars[col] == "":
                    fields2.append("0")
                else:
                    fields2.append(vars[col])
            else:
                if coltypes[col]:
                    fields2.append("0")
                else:
                    fields2.append("")
        line2 = ";".join(fields2) + "\n"
        outf.write(line2)
        num_places += 1
    outf.close()
    inf.close()

    grass.message(_("Converted %d place names.") % num_places)

    # TODO: fix dms_lat,dms_long DDMMSS -> DD:MM:SS
    # Solution:
    # IN=DDMMSS
    # DEG=`echo $IN | cut -b1,2`
    # MIN=`echo $IN | cut -b3,4`
    # SEC=`echo $IN | cut -b5,6`
    # DEG_STR="$DEG:$MIN:$SEC"

    # modifications (to match DBF 10 char column name limit):
    # short_form   -> shortform
    # sort_name    -> sortname
    # full_name    -> fullname
    # full_name_sd -> funamesd

    # pump data into GRASS:
    columns = [desc[1] for desc in coldescs]
    grass.run_command(
        "v.in.ascii",
        cat=0,
        x=5,
        y=4,
        sep=";",
        input=tmpfile,
        output=filevect,
        columns=columns,
    )

    try_remove(tmpfile)

    # write cmd history:
    # FIX: was vgrass.vector_history — 'vgrass' is undefined here
    grass.vector_history(filevect)
def main():
    """Draw a polar diagram of angle raster values (d.polar, old variant).

    Reads the raster cell values with r.stats, bins them to integer
    degrees, computes the average direction as a unit vector, and plots
    via EPS, xgraph or d.graph depending on the selected output method.
    Results are published through module-level globals consumed by the
    plot_* helpers.
    """
    global tmp
    global sine_cosine_replic, outercircle, vector
    global totalvalidnumber, totalnumber, maxradius

    map = options['map']
    undef = options['undef']
    eps = options['eps']
    xgraph = flags['x']

    tmp = grass.tempfile()

    if eps and xgraph:
        grass.fatal(_("Please select only one output method"))

    #### check if we have xgraph (if no EPS output requested)
    if xgraph and not grass.find_program('xgraph'):
        grass.fatal(_("xgraph required, please install first (www.xgraph.org)"))

    #################################
    # this file contains everthing:
    rawfile = tmp + "_raw"
    # NOTE(review): file() is the Python 2 builtin used throughout this variant
    rawf = file(rawfile, 'w')
    grass.run_command('r.stats', flags = '1', input = map, stdout = rawf)
    rawf.close()

    # count all cells (including NULLs) for the outer-circle scale
    rawf = file(rawfile)
    totalnumber = 0
    for line in rawf:
        totalnumber += 1
    rawf.close()

    grass.message(_("Calculating statistics for polar diagram... (be patient)"))

    #wipe out NULL data and undef data if defined by user
    # - generate degree binned to integer, eliminate NO DATA (NULL):
    # change 360 to 0 to close polar diagram:
    rawf = file(rawfile)
    nvals = 0
    sumcos = 0
    sumsin = 0
    freq = {}
    for line in rawf:
        line = line.rstrip('\r\n')
        if line in ['*', undef]:
            continue
        nvals += 1
        x = float(line)
        rx = math.radians(x)
        sumcos += math.cos(rx)
        sumsin += math.sin(rx)
        ix = round(x)
        if ix == 360:
            ix = 0
        if ix in freq:
            freq[ix] += 1
        else:
            freq[ix] = 1
    rawf.close()

    totalvalidnumber = nvals
    if totalvalidnumber == 0:
        grass.fatal(_("No data pixel found"))

    #################################
    # unit vector on raw data converted to radians without no data:
    unitvector = (sumcos / nvals, sumsin / nvals)

    #################################
    # how many are there?:
    occurrences = [(math.radians(x), freq[x]) for x in freq]
    occurrences.sort()

    # find the maximum value
    maxradius = max([f for a, f in occurrences])

    # now do cos() sin()
    sine_cosine = [(math.cos(a) * f, math.sin(a) * f) for a, f in occurrences]

    # replicate the first point at the end to close the polygon
    sine_cosine_replic = ['"Real data angles'] + sine_cosine + sine_cosine[0:1]

    if eps or xgraph:
        # outer circle scaled by the NULL/valid ratio
        outercircle = []
        outercircle.append('"All Data incl. NULLs')
        scale = 1.0 * totalnumber / totalvalidnumber * maxradius
        for i in range(0, 361):
            a = math.radians(i)
            x = math.cos(a) * scale
            y = math.sin(a) * scale
            outercircle.append((x, y))

    # fix vector length to become visible (x? of $MAXRADIUS):
    vector = []
    vector.append('"Avg. Direction\n')
    vector.append((0, 0))
    vector.append((unitvector[0] * maxradius, unitvector[1] * maxradius))

    ###########################################################
    # Now output:
    if eps:
        psout = basename(eps, 'eps') + '.eps'
        plot_eps(psout)
    elif xgraph:
        plot_xgraph()
    else:
        plot_dgraph()

    grass.message(_("Average vector:"))
    grass.message(_("direction: %.1f degrees CCW from East") % math.degrees(math.atan2(unitvector[1], unitvector[0])))
    grass.message(_("magnitude: %.1f percent of fullscale") % (100 * math.hypot(unitvector[0], unitvector[1])))
def main():
    """Draw a polar diagram of angle raster values (d.polar, gcore variant).

    Reads the raster cell values with r.stats, bins them to integer
    degrees, computes the average direction as a unit vector, and plots
    via EPS, xgraph or d.graph depending on the selected output method.
    Results are published through module-level globals consumed by the
    plot_* helpers.
    """
    global tmp
    global sine_cosine_replic, outercircle, vector
    global totalvalidnumber, totalnumber, maxradius

    map = options['map']
    undef = options['undef']
    eps = options['output']
    xgraph = flags['x']

    tmp = gcore.tempfile()

    if eps and xgraph:
        gcore.fatal(_("Please select only one output method"))

    # check if we have xgraph (if no EPS output requested)
    if xgraph and not gcore.find_program('xgraph'):
        gcore.fatal(
            _("xgraph required, please install first (www.xgraph.org)"))

    raster_map_required(map)

    #################################
    # this file contains everything:
    rawfile = tmp + "_raw"
    # NOTE(review): file() is the Python 2 builtin used throughout this variant
    rawf = file(rawfile, 'w')
    gcore.run_command('r.stats', flags='1', input=map, stdout=rawf)
    rawf.close()

    # count all cells (including NULLs) for the outer-circle scale
    rawf = file(rawfile)
    totalnumber = 0
    for line in rawf:
        totalnumber += 1
    rawf.close()

    gcore.message(
        _("Calculating statistics for polar diagram... (be patient)"))

    # wipe out NULL data and undef data if defined by user
    # - generate degree binned to integer, eliminate NO DATA (NULL):
    # change 360 to 0 to close polar diagram:
    rawf = file(rawfile)
    nvals = 0
    sumcos = 0
    sumsin = 0
    freq = {}
    for line in rawf:
        line = line.rstrip('\r\n')
        if line in ['*', undef]:
            continue
        nvals += 1
        x = float(line)
        rx = math.radians(x)
        sumcos += math.cos(rx)
        sumsin += math.sin(rx)
        ix = round(x)
        if ix == 360:
            ix = 0
        if ix in freq:
            freq[ix] += 1
        else:
            freq[ix] = 1
    rawf.close()

    totalvalidnumber = nvals
    if totalvalidnumber == 0:
        gcore.fatal(_("No data pixel found"))

    #################################
    # unit vector on raw data converted to radians without no data:
    unitvector = (sumcos / nvals, sumsin / nvals)

    #################################
    # how many are there?:
    occurrences = sorted([(math.radians(x), freq[x]) for x in freq])

    # find the maximum value
    maxradius = max([f for a, f in occurrences])

    # now do cos() sin()
    sine_cosine = [(math.cos(a) * f, math.sin(a) * f) for a, f in occurrences]

    # replicate the first point at the end to close the polygon
    sine_cosine_replic = ['"Real data angles'] + sine_cosine + sine_cosine[0:1]

    if eps or xgraph:
        # outer circle scaled by the NULL/valid ratio
        outercircle = []
        outercircle.append('"All Data incl. NULLs')
        scale = 1.0 * totalnumber / totalvalidnumber * maxradius
        for i in range(0, 361):
            a = math.radians(i)
            x = math.cos(a) * scale
            y = math.sin(a) * scale
            outercircle.append((x, y))

    # fix vector length to become visible (x? of $MAXRADIUS):
    vector = []
    vector.append('"Avg. Direction\n')
    vector.append((0, 0))
    vector.append((unitvector[0] * maxradius, unitvector[1] * maxradius))

    ###########################################################
    # Now output:
    if eps:
        psout = basename(eps, 'eps') + '.eps'
        plot_eps(psout)
    elif xgraph:
        plot_xgraph()
    else:
        plot_dgraph()

    gcore.message(_("Average vector:"))
    gcore.message(
        _("direction: %.1f degrees CCW from East") %
        math.degrees(math.atan2(unitvector[1], unitvector[0])))
    gcore.message(
        _("magnitude: %.1f percent of fullscale") %
        (100 * math.hypot(unitvector[0], unitvector[1])))
def main():
    """Import an ESRI E00 archive into a GRASS vector map (gcore variant).

    Uses the external avcimport/e00conv tools to turn the (possibly
    split and/or compressed) E00 file into an Arc Coverage inside a
    temporary directory, then imports the requested feature type with
    v.in.ogr and removes the temporary files.
    """
    filename = options['input']
    type = options['type']
    vect = options['output']
    e00tmp = str(os.getpid())

    # check for avcimport
    if not gcore.find_program('avcimport'):
        gcore.fatal(
            _("'avcimport' program not found, install it first") +
            "\n" + "http://avce00.maptools.org")

    # check for e00conv
    if not gcore.find_program('e00conv'):
        gcore.fatal(
            _("'e00conv' program not found, install it first") +
            "\n" + "http://avce00.maptools.org")

    # check that the user didn't use all three, which gets past the parser.
    if type not in ['point', 'line', 'area']:
        gcore.fatal(_('Must specify one of "point", "line", or "area".'))

    e00name = basename(filename, 'e00')
    # avcimport only accepts 13 chars:
    e00shortname = e00name[:13]

    # check if this is a split E00 file (.e01, .e02 ...):
    merging = False
    if os.path.exists(e00name + '.e01') or os.path.exists(e00name + '.E01'):
        gcore.message(
            _("Found that E00 file is split into pieces (.e01, ...)."
              " Merging..."))
        merging = True

    if vect:
        name = vect
    else:
        name = e00name

    # do import

    # make a temporary directory
    tmpdir = gcore.tempfile()
    try_remove(tmpdir)
    os.mkdir(tmpdir)

    files = glob.glob(e00name + '.e[0-9][0-9]') + \
        glob.glob(e00name + '.E[0-9][0-9]')
    for f in files:
        shutil.copy(f, tmpdir)

    # change to temporary directory to later avoid removal problems (rm -r ...)
    os.chdir(tmpdir)

    # check for binay E00 file (we can just check if import fails):
    # avcimport doesn't set exist status :-(
    if merging:
        files.sort()
        filename = "%s.cat.%s.e00" % (e00name, e00tmp)
        # NOTE(review): file() is the Python 2 builtin used in this variant
        outf = file(filename, 'wb')
        for f in files:
            inf = file(f, 'rb')
            shutil.copyfileobj(inf, outf)
            inf.close()
        outf.close()

    nuldev = file(os.devnull, 'w+')

    gcore.message(_("An error may appear next which will be ignored..."))
    if gcore.call(['avcimport', filename, e00shortname],
                  stdout=nuldev, stderr=nuldev) == 1:
        gcore.message(
            _("E00 ASCII found and converted to Arc Coverage in "
              "current directory"))
    else:
        gcore.message(
            _("E00 Compressed ASCII found. Will uncompress first..."))
        try_remove(e00shortname)
        gcore.call(['e00conv', filename, e00tmp + '.e00'])
        gcore.message(_("...converted to Arc Coverage in current directory"))
        gcore.call(['avcimport', e00tmp + '.e00', e00shortname],
                   stderr=nuldev)

    # SQL name fix:
    name = name.replace('-', '_')

    # let's import...
    gcore.message(_("Importing %ss...") % type)

    layer = dict(point='LAB', line='ARC', area=['LAB', 'ARC'])
    itype = dict(point='point', line='line', area='centroid')

    try:
        gcore.run_command('v.in.ogr', flags='o', input=e00shortname,
                          layer=layer[type], type=itype[type],
                          output=name)
    except CalledModuleError:
        gcore.fatal(_("An error occurred while running v.in.ogr"))

    gcore.message(_("Imported <%s> vector map <%s>.") % (type, name))

    # clean up the mess
    for root, dirs, files in os.walk('.', False):
        for f in files:
            path = os.path.join(root, f)
            try_remove(path)
        for d in dirs:
            path = os.path.join(root, d)
            try_rmdir(path)
    os.chdir('..')
    os.rmdir(tmpdir)

    # end
    gcore.message(_("Done."))

    # write cmd history:
    gvect.vector_history(name)
def main():
    """Import data from a Geopaparazzi sqlite database into GRASS.

    Depending on the module flags, imports bookmarks (-b), images (-i),
    notes (-n) and GPS tracks (-t) as vector maps named after the
    'basename' option.  If the current location is not LatLong, the data
    is first imported into a temporary EPSG:4326 location and reprojected
    back with v.proj afterwards.
    """
    indb = options["database"]
    prefix = options["basename"]
    env = grass.gisenv()
    # fix sqlite3 db field string multibyte character problem
    # FIX: sys.setdefaultencoding() was removed in Python 3 (strings are
    # unicode there) and in Python 2 it is only reachable after reload(sys);
    # guard it so the module works on both interpreters
    try:
        reload(sys)  # Python 2 only; NameError on Python 3
        sys.setdefaultencoding("utf-8")
    except (NameError, AttributeError):
        pass
    # check if 3d or not
    if flags["z"]:
        d3 = "z"
    else:
        d3 = ""
    owrite = grass.overwrite()
    # check if location it is latlong
    if grass.locn_is_latlong():
        locn = True
    else:
        locn = False
    # connection to sqlite geopaparazzi database
    import sqlite3

    conn = sqlite3.connect(indb)
    curs = conn.cursor()
    # if it is not a latlong location create a latlong location on the fly
    if not locn:
        # create new location and move to it creating new gisrc file
        new_loc = basename(grass.tempfile(create=False))
        new_loc_name = "geopaparazzi_%s" % new_loc
        grass.create_location(
            dbase=env["GISDBASE"],
            epsg="4326",
            location=new_loc_name,
            desc="Temporary location for v.in.geopaparazzi",
        )
        grc = os.getenv("GISRC")
        # keep the original gisrc so it can be restored after the import
        shutil.copyfile(grc, grc + ".old")
        newrc = open(grc, "w")
        newrc.write("GISDBASE: %s\n" % env["GISDBASE"])
        newrc.write("LOCATION_NAME: %s\n" % new_loc_name)
        newrc.write("MAPSET: PERMANENT\n")
        newrc.write("GRASS_GUI: text\n")
        newrc.close()
        grass.run_command("db.connect", flags="d", quiet=True)

    # load bookmarks
    if flags["b"]:
        # check if elements in bookmarks table are more the 0
        if checkEle(curs, "bookmarks") != 0:
            bookname = prefix + "_book"
            pois = importGeom(bookname, "bookmarks", curs, owrite, "")
            sql = "CREATE TABLE %s (cat int, text text)" % bookname
            grass.write_command("db.execute", input="-", stdin=sql)
            # select attributes
            sql = "select text from bookmarks order by _id"
            allattri = returnClear(curs, sql)
            # add values using insert statement
            idcat = 1
            for row in allattri:
                values = "%d,'%s'" % (idcat, str(row))
                sql = "insert into %s values(%s)" % (bookname, values)
                grass.write_command("db.execute", input="-", stdin=sql)
                idcat += 1
            # at the end connect table to vector
            grass.run_command("v.db.connect", map=bookname, table=bookname, quiet=True)
        else:
            grass.warning(_("No bookmarks found, escape them"))
    # load images
    if flags["i"]:
        # check if elements in images table are more the 0
        if checkEle(curs, "images") != 0:
            imagename = prefix + "_image"
            pois = importGeom(imagename, "images", curs, owrite, d3)
            sql = "CREATE TABLE %s (cat int, azim int, " % imagename
            sql += "path text, ts text, text text)"
            grass.write_command("db.execute", input="-", stdin=sql)
            # select attributes
            sql = "select azim, path, ts, text from images order by _id"
            allattri = returnAll(curs, sql)
            # add values using insert statement
            idcat = 1
            for row in allattri:
                values = "%d,'%d','%s','%s','%s'" % (
                    idcat,
                    row[0],
                    str(row[1]),
                    str(row[2]),
                    str(row[3]),
                )
                sql = "insert into %s values(%s)" % (imagename, values)
                grass.write_command("db.execute", input="-", stdin=sql)
                idcat += 1
            # at the end connect table to vector
            grass.run_command(
                "v.db.connect", map=imagename, table=imagename, quiet=True
            )
        else:
            grass.warning(_("No images found, escape them"))
    # if tracks or nodes should be imported create a connection with sqlite3
    # load notes
    if flags["n"]:
        # check if elements in notes table are more the 0
        if checkEle(curs, "notes") != 0:
            # select each categories
            categories = returnClear(curs, "select cat from notes group by cat")
            # for each category
            for cat in categories:
                # select lat, lon for create point layer
                catname = prefix + "_notes_" + cat
                pois = importGeom(catname, "notes", curs, owrite, d3, cat)
                # select form to understand the number
                forms = returnClear(
                    curs,
                    "select _id from notes where cat = '%s' "
                    "and form is not null order by _id" % cat,
                )
                # if number of form is different from 0 and number of point
                # remove the vector because some form it is different
                if len(forms) != 0 and len(forms) != len(pois):
                    grass.run_command(
                        "g.remove", flags="f", type="vector", name=catname, quiet=True
                    )
                    # FIX: the format string has a %s placeholder but no
                    # argument was supplied; pass the map name
                    grass.warning(
                        _(
                            "Vector %s not imported because number"
                            " of points and form is different"
                        )
                        % catname
                    )
                # if form it's 0 there is no form
                elif len(forms) == 0:
                    # create table without form
                    sql = "CREATE TABLE %s (cat int, ts text, " % catname
                    sql += "text text, geopap_cat text)"
                    grass.write_command("db.execute", input="-", stdin=sql)
                    # select attributes
                    sql = (
                        "select ts, text, cat from notes where "
                        "cat='%s' order by _id" % cat
                    )
                    allattri = returnAll(curs, sql)
                    # add values using insert statement
                    idcat = 1
                    for row in allattri:
                        values = "%d,'%s','%s','%s'" % (
                            idcat,
                            str(row[0]),
                            str(row[1]),
                            str(row[2]),
                        )
                        sql = "insert into %s values(%s)" % (catname, values)
                        grass.write_command("db.execute", input="-", stdin=sql)
                        idcat += 1
                    # at the end connect table to vector
                    grass.run_command(
                        "v.db.connect", map=catname, table=catname, quiet=True
                    )
                # create table with form
                else:
                    # select all the attribute
                    sql = (
                        "select ts, text, cat, form from notes where "
                        "cat='%s' order by _id" % cat
                    )
                    allattri = returnAll(curs, sql)
                    # return string of form's categories too create table
                    keys = returnFormKeys(allattri)
                    sql = "CREATE TABLE %s (cat int, ts text, " % catname
                    sql += "text text, geopap_cat text %s)" % keys
                    grass.write_command("db.execute", input="-", stdin=sql)
                    # it's for the number of categories
                    idcat = 1
                    # for each feature insert value
                    for row in allattri:
                        values = "%d,'%s','%s','%s'," % (
                            idcat,
                            str(row[0]),
                            str(row[1]),
                            str(row[2]),
                        )
                        values += returnFormValues(row[3])
                        sql = "insert into %s values(%s)" % (catname, values)
                        grass.write_command("db.execute", input="-", stdin=sql)
                        idcat += 1
                    # at the end connect table with vector
                    grass.run_command(
                        "v.db.connect", map=catname, table=catname, quiet=True
                    )
        else:
            grass.warning(_("No notes found, escape them"))
    # load tracks
    if flags["t"]:
        # check if elements in bookmarks table are more the 0
        if checkEle(curs, "gpslogs") != 0:
            tracksname = prefix + "_tracks"
            # define string for insert data at the end
            tracks = ""
            # return ids of tracks
            ids = returnClear(curs, "select _id from gpslogs")
            # for each track
            for i in ids:
                # select all the points coordinates
                tsel = "select lon, lat"
                if flags["z"]:
                    tsel += ", altim"
                tsel += " from gpslog_data where logid=%s order by _id" % i
                trackpoints = returnAll(curs, tsel)
                wpoi = "\n".join(
                    ["|".join([str(col) for col in row]) for row in trackpoints]
                )
                tracks += "%s\n" % wpoi
                # NaN row terminates each line for v.in.lines
                if flags["z"]:
                    tracks += "NaN|NaN|Nan\n"
                else:
                    tracks += "NaN|Nan\n"
            # import lines
            try:
                grass.write_command(
                    "v.in.lines",
                    flags=d3,
                    input="-",
                    out=tracksname,
                    stdin=tracks,
                    overwrite=owrite,
                    quiet=True,
                )
            except CalledModuleError:
                grass.fatal(_("Error importing %s" % tracksname))
            # create table for line
            sql = "CREATE TABLE %s (cat int, startts text, " % tracksname
            sql += "endts text, text text, color text, width int)"
            grass.write_command("db.execute", input="-", stdin=sql)
            sql = (
                "select logid, startts, endts, text, color, width from"
                " gpslogs, gpslogsproperties where gpslogs._id="
                "gpslogsproperties.logid"
            )
            # return attributes
            allattri = returnAll(curs, sql)
            # for each line insert attribute
            for row in allattri:
                values = "%d,'%s','%s','%s','%s',%d" % (
                    row[0],
                    str(row[1]),
                    str(row[2]),
                    str(row[3]),
                    str(row[4]),
                    row[5],
                )
                sql = "insert into %s values(%s)" % (tracksname, values)
                grass.write_command("db.execute", input="-", stdin=sql)
            # at the end connect map with table
            grass.run_command(
                "v.db.connect", map=tracksname, table=tracksname, quiet=True
            )
        else:
            grass.warning(_("No tracks found, escape them"))
    # if location it's not latlong reproject it
    if not locn:
        # copy restore the original location
        shutil.copyfile(grc + ".old", grc)
        # reproject bookmarks
        if flags["b"] and checkEle(curs, "bookmarks") != 0:
            grass.run_command(
                "v.proj",
                quiet=True,
                input=bookname,
                location="geopaparazzi_%s" % new_loc,
                mapset="PERMANENT",
            )
        # reproject images
        if flags["i"] and checkEle(curs, "images") != 0:
            grass.run_command(
                "v.proj",
                quiet=True,
                input=imagename,
                location="geopaparazzi_%s" % new_loc,
                mapset="PERMANENT",
            )
        # reproject notes
        if flags["n"] and checkEle(curs, "notes") != 0:
            for cat in categories:
                # FIX: the maps were created with the "_notes_" infix above;
                # "_node_" pointed v.proj at nonexistent maps
                catname = prefix + "_notes_" + cat
                grass.run_command(
                    "v.proj",
                    quiet=True,
                    input=catname,
                    location="geopaparazzi_%s" % new_loc,
                    mapset="PERMANENT",
                )
        # reproject track
        if flags["t"] and checkEle(curs, "gpslogs") != 0:
            grass.run_command(
                "v.proj",
                quiet=True,
                input=tracksname,
                location="geopaparazzi_%s" % new_loc,
                mapset="PERMANENT",
            )
    # release the sqlite connection
    curs.close()
    conn.close()
def main():
    """Import GNS (GEOnet Names Server) place names as a GRASS vector map.

    Reads the tab-separated GNS text file, normalizes it into a
    semicolon-separated temporary file matching the column layout below
    (empty numeric fields become 0), then loads it with v.in.ascii.
    Only works in a LatLong/WGS84 location.
    """
    fileorig = options['input']
    filevect = options['output']
    if not filevect:
        filevect = basename(fileorig, 'txt')

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = parse_key_val(s)
    if kv['+proj'] != 'longlat':
        grass.fatal(_("This module only operates in LatLong/WGS84 locations"))

    #### setup temporary file
    tmpfile = grass.tempfile()

    # (GNS header name, SQL column definition), in output column order
    coldescs = [("RC", "rc integer"),
                ("UFI", "uf1 integer"),
                ("UNI", "uni integer"),
                ("LAT", "lat double precision"),
                ("LONG", "lon double precision"),
                ("DMS_LAT", "dms_lat varchar(6)"),
                ("DMS_LONG", "dms_long varchar(7)"),
                ("UTM", "utm varchar(4)"),
                ("JOG", "jog varchar(7)"),
                ("FC", "fc varchar(1)"),
                ("DSG", "dsg varchar(5)"),
                ("PC", "pc integer"),
                ("CC1", "cci varchar(2)"),
                ("ADM1", "adm1 varchar(2)"),
                ("ADM2", "adm2 varchar(200)"),
                ("DIM", "dim integer"),
                ("CC2", "cc2 varchar(2)"),
                ("NT", "nt varchar(1)"),
                ("LC", "lc varchar(3)"),
                ("SHORT_FORM", "shortform varchar(128)"),
                ("GENERIC", "generic varchar(128)"),
                ("SORT_NAME", "sortname varchar(200)"),
                ("FULL_NAME", "fullname varchar(200)"),
                ("FULL_NAME_ND", "funamesd varchar(200)"),
                ("MODIFY_DATE", "mod_date date")]

    colnames = [desc[0] for desc in coldescs]
    # True for columns that need '0' instead of '' when empty/missing
    coltypes = dict([(desc[0], 'integer' in desc[1]) for desc in coldescs])

    header = None
    num_places = 0
    # FIX: open() instead of the Python-2-only file() builtin; text mode
    # (not 'wb') because we write str, and 'with' closes both handles
    with open(fileorig) as inf, open(tmpfile, 'w') as outf:
        for line in inf:
            fields = line.rstrip('\r\n').split('\t')
            if not header:
                # first data line is the header row
                header = fields
                continue
            vars = dict(zip(header, fields))
            fields2 = []
            for col in colnames:
                if col in vars:
                    if coltypes[col] and vars[col] == '':
                        fields2.append('0')
                    else:
                        fields2.append(vars[col])
                else:
                    if coltypes[col]:
                        fields2.append('0')
                    else:
                        fields2.append('')
            line2 = ';'.join(fields2) + '\n'
            outf.write(line2)
            num_places += 1

    grass.message(_("Converted %d place names.") % num_places)

    # TODO: fix dms_lat,dms_long DDMMSS -> DD:MM:SS
    # Solution:
    # IN=DDMMSS
    # DEG=`echo $IN | cut -b1,2`
    # MIN=`echo $IN | cut -b3,4`
    # SEC=`echo $IN | cut -b5,6`
    # DEG_STR="$DEG:$MIN:$SEC"

    # modifications (to match DBF 10 char column name limit):
    # short_form   -> shortform
    # sort_name    -> sortname
    # full_name    -> fullname
    # full_name_sd -> funamesd

    # pump data into GRASS:
    columns = [desc[1] for desc in coldescs]

    grass.run_command('v.in.ascii', cat=0, x=5, y=4, sep=';',
                      input=tmpfile, output=filevect, columns=columns)

    try_remove(tmpfile)

    # write cmd history:
    vgrass.vector_history(filevect)
def main():
    """Import an E00 (Arc/Info interchange) file as a GRASS vector map.

    Converts the E00 input to an Arc Coverage in a temporary directory
    using the external 'avcimport' and 'e00conv' tools (merging split
    .e01/.e02... pieces and uncompressing when necessary), then imports
    the requested feature type with v.in.ogr.
    """
    filename = options["input"]
    type = options["type"]
    vect = options["output"]
    # PID keeps temporary file names unique per process
    e00tmp = str(os.getpid())

    # check for avcimport
    if not gcore.find_program("avcimport"):
        gcore.fatal(
            _("'avcimport' program not found, install it first")
            + "\n"
            + "http://avce00.maptools.org"
        )

    # check for e00conv
    if not gcore.find_program("e00conv"):
        gcore.fatal(
            _("'e00conv' program not found, install it first")
            + "\n"
            + "http://avce00.maptools.org"
        )

    # check that the user didn't use all three, which gets past the parser.
    if type not in ["point", "line", "area"]:
        gcore.fatal(_('Must specify one of "point", "line", or "area".'))

    e00name = basename(filename, "e00")
    # avcimport only accepts 13 chars:
    e00shortname = e00name[:13]

    # check if this is a split E00 file (.e01, .e02 ...):
    merging = False
    if os.path.exists(e00name + ".e01") or os.path.exists(e00name + ".E01"):
        gcore.message(
            _("Found that E00 file is split into pieces (.e01, ...)." " Merging...")
        )
        merging = True

    if vect:
        name = vect
    else:
        name = e00name

    # do import

    # make a temporary directory
    tmpdir = gcore.tempfile()
    try_remove(tmpdir)
    os.mkdir(tmpdir)

    files = glob.glob(e00name + ".e[0-9][0-9]") + glob.glob(e00name + ".E[0-9][0-9]")
    for f in files:
        shutil.copy(f, tmpdir)

    # change to temporary directory to later avoid removal problems (rm -r ...)
    os.chdir(tmpdir)

    # check for binay E00 file (we can just check if import fails):
    # avcimport doesn't set exist status :-(

    if merging:
        files.sort()
        filename = "%s.cat.%s.e00" % (e00name, e00tmp)
        # FIX: open() instead of the Python-2-only file() builtin; context
        # managers guarantee the handles are closed even on error
        with open(filename, "wb") as outf:
            for f in files:
                with open(f, "rb") as inf:
                    shutil.copyfileobj(inf, outf)

    # FIX: open() instead of file(); 'with' closes the devnull handle
    with open(os.devnull, "w+") as nuldev:
        gcore.message(_("An error may appear next which will be ignored..."))
        if (
            gcore.call(["avcimport", filename, e00shortname], stdout=nuldev, stderr=nuldev)
            == 1
        ):
            gcore.message(
                _("E00 ASCII found and converted to Arc Coverage in " "current directory")
            )
        else:
            gcore.message(_("E00 Compressed ASCII found. Will uncompress first..."))
            try_remove(e00shortname)
            gcore.call(["e00conv", filename, e00tmp + ".e00"])
            gcore.message(_("...converted to Arc Coverage in current directory"))
            gcore.call(["avcimport", e00tmp + ".e00", e00shortname], stderr=nuldev)

    # SQL name fix:
    name = name.replace("-", "_")

    # let's import...
    gcore.message(_("Importing %ss...") % type)

    # which coverage layer and which v.in.ogr geometry to read per type
    layer = dict(point="LAB", line="ARC", area=["LAB", "ARC"])
    itype = dict(point="point", line="line", area="centroid")

    try:
        gcore.run_command(
            "v.in.ogr",
            flags="o",
            input=e00shortname,
            layer=layer[type],
            type=itype[type],
            output=name,
        )
    except CalledModuleError:
        gcore.fatal(_("An error occurred while running v.in.ogr"))

    gcore.message(_("Imported <%s> vector map <%s>.") % (type, name))

    # clean up the mess
    for root, dirs, files in os.walk(".", False):
        for f in files:
            path = os.path.join(root, f)
            try_remove(path)
        for d in dirs:
            path = os.path.join(root, d)
            try_rmdir(path)

    os.chdir("..")
    os.rmdir(tmpdir)

    # end
    gcore.message(_("Done."))

    # write cmd history:
    gvect.vector_history(name)
def main():
    """Export a vector map's attribute table with v.out.ogr.

    Normalizes the requested OGR format, runs the export, removes the
    geometry companion files a shapefile export leaves behind (only the
    .dbf attribute table is wanted), and reports the exported table name.
    """
    input = options["input"]
    layer = options["layer"]
    format = options["format"]
    output = options["output"]
    table = options["table"]

    # 'dbf' is not an OGR format of its own; the shapefile driver writes it
    if format.lower() == "dbf":
        format = "ESRI_Shapefile"

    # CSV output needs an explicit layer name derived from the target file
    olayer = basename(output, "csv") if format.lower() == "csv" else None

    # is there a simpler way of testing for --overwrite?
    dbffile = input + ".dbf"
    if os.path.exists(dbffile) and not gcore.overwrite():
        gcore.fatal(_("File <%s> already exists") % dbffile)

    # assemble the call once; olayer is only passed when it is set
    kwargs = dict(
        quiet=True,
        input=input,
        layer=layer,
        output=output,
        format=format,
        type="point,line,area",
    )
    if olayer:
        kwargs["olayer"] = olayer
    try:
        gcore.run_command("v.out.ogr", **kwargs)
    except CalledModuleError:
        gcore.fatal(_("Module <%s> failed") % "v.out.ogr")

    if format == "ESRI_Shapefile":
        # drop the geometry files; only the attribute table is of interest
        geometry_exts = ["shp", "shx", "prj"]
        if output.endswith(".dbf"):
            outname = basename(output, "dbf")
            for ext in geometry_exts:
                try_remove("%s.%s" % (outname, ext))
            outname += ".dbf"
        else:
            for ext in geometry_exts:
                try_remove(os.path.join(output, "%s.%s" % (input, ext)))
            outname = os.path.join(output, input + ".dbf")
    elif format.lower() == "csv":
        outname = output + ".csv"
    else:
        outname = input

    gcore.message(_("Exported table <%s>") % outname)
def main():
    """Import GNS (GEOnet Names Server) place names as a GRASS vector map.

    Reads the tab-separated GNS text file, normalizes it into a
    semicolon-separated temporary file matching the column layout below
    (empty numeric fields become 0), then loads it with v.in.ascii.
    Only works in a LatLong/WGS84 location.
    """
    fileorig = options['input']
    filevect = options['output']
    if not filevect:
        filevect = basename(fileorig, 'txt')

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = parse_key_val(s)
    if kv['+proj'] != 'longlat':
        grass.fatal(_("This module only operates in LatLong/WGS84 locations"))

    #### setup temporary file
    tmpfile = grass.tempfile()

    # (GNS header name, SQL column definition), in output column order
    coldescs = [("RC", "rc integer"),
                ("UFI", "uf1 integer"),
                ("UNI", "uni integer"),
                ("LAT", "lat double precision"),
                ("LONG", "lon double precision"),
                ("DMS_LAT", "dms_lat varchar(6)"),
                ("DMS_LONG", "dms_long varchar(7)"),
                ("UTM", "utm varchar(4)"),
                ("JOG", "jog varchar(7)"),
                ("FC", "fc varchar(1)"),
                ("DSG", "dsg varchar(5)"),
                ("PC", "pc integer"),
                ("CC1", "cci varchar(2)"),
                ("ADM1", "adm1 varchar(2)"),
                ("ADM2", "adm2 varchar(200)"),
                ("DIM", "dim integer"),
                ("CC2", "cc2 varchar(2)"),
                ("NT", "nt varchar(1)"),
                ("LC", "lc varchar(3)"),
                ("SHORT_FORM", "shortform varchar(128)"),
                ("GENERIC", "generic varchar(128)"),
                ("SORT_NAME", "sortname varchar(200)"),
                ("FULL_NAME", "fullname varchar(200)"),
                ("FULL_NAME_ND", "funamesd varchar(200)"),
                ("MODIFY_DATE", "mod_date date")]

    colnames = [desc[0] for desc in coldescs]
    # True for columns that need '0' instead of '' when empty/missing
    coltypes = dict([(desc[0], 'integer' in desc[1]) for desc in coldescs])

    header = None
    num_places = 0
    # FIX: open() instead of the Python-2-only file() builtin; text mode
    # (not 'wb') because we write str, and 'with' closes both handles
    with open(fileorig) as inf, open(tmpfile, 'w') as outf:
        for line in inf:
            fields = line.rstrip('\r\n').split('\t')
            if not header:
                # first data line is the header row
                header = fields
                continue
            vars = dict(zip(header, fields))
            fields2 = []
            for col in colnames:
                if col in vars:
                    if coltypes[col] and vars[col] == '':
                        fields2.append('0')
                    else:
                        fields2.append(vars[col])
                else:
                    if coltypes[col]:
                        fields2.append('0')
                    else:
                        fields2.append('')
            line2 = ';'.join(fields2) + '\n'
            outf.write(line2)
            num_places += 1

    grass.message(_("Converted %d place names.") % num_places)

    # TODO: fix dms_lat,dms_long DDMMSS -> DD:MM:SS
    # Solution:
    # IN=DDMMSS
    # DEG=`echo $IN | cut -b1,2`
    # MIN=`echo $IN | cut -b3,4`
    # SEC=`echo $IN | cut -b5,6`
    # DEG_STR="$DEG:$MIN:$SEC"

    # modifications (to match DBF 10 char column name limit):
    # short_form   -> shortform
    # sort_name    -> sortname
    # full_name    -> fullname
    # full_name_sd -> funamesd

    # pump data into GRASS:
    columns = [desc[1] for desc in coldescs]

    grass.run_command('v.in.ascii', cat=0, x=5, y=4, sep=';',
                      input=tmpfile, output=filevect, columns=columns)

    try_remove(tmpfile)

    # write cmd history:
    vgrass.vector_history(filevect)