def __init__(
    self,
    code,
    lon=None,
    lat=None,
    hgt=None,
    robg="B10",
    cosy="NZGD2000",
    rdate="2000.01.01",
    comment="",
):
    self.code = code
    markdata = GDB.get(code)
    if not markdata:
        raise RuntimeError(code + " is not in the geodetic database")
    coord = markdata.coordinate
    gdbcoord = [coord.longitude, coord.latitude, coord.height]
    if lon is None or lat is None or hgt is None:
        lon, lat, hgt = gdbcoord
    self.lon = lon
    self.lat = lat
    self.hgt = hgt
    self.robg = robg
    self.cosy = cosy
    self.rdate = rdate
    self.comment = comment
    self.gdbCoord = gdbcoord
    self.xyz0 = GRS80.xyz(self.gdbCoord)
    self.enu_axes = GRS80.enu_axes(self.gdbCoord[0], self.gdbCoord[1])
def getMarkData(
    self,
    code,
    index=None,
    fillDays=False,
    after=None,
    before=None,
    increment=1.0,
    xyz0=None,
    xyz0Date=None,
):
    """
    Get the time series for a geodetic mark.

    Retrieves the official coordinates, calculates the deformation model
    at the required dates, and constructs a time series from them.

    The dates can be supplied as a list (index), as from/to dates and an
    increment, or left to the default dates.  The reference coordinate can
    be supplied explicitly, or calculated for a reference date; otherwise
    the NZGD2000 XYZ value is used.
    """
    if self._deformationModelDirectory is None:
        raise RuntimeError(
            "GDB timeseries deformation model transformation is not defined"
        )
    if self._itrfTransformation is None:
        raise RuntimeError("GDB timeseries ITRF transformation is not defined")
    markdata = GDB.get(code)
    if markdata is None:
        raise RuntimeError(
            "GDB timeseries for " + code + " not available - mark not in GDB"
        )
    coord = markdata.coordinate
    lon, lat, hgt = coord.longitude, coord.latitude, coord.height
    markxyz = GRS80.xyz(lon, lat, hgt)
    function = lambda d: GRS80.xyz(self._itrfTransformation(lon, lat, hgt, d))
    if xyz0 is None and xyz0Date is not None:
        xyz0 = function(xyz0Date)
    if xyz0 is None:
        xyz0 = markxyz
    gdbts = CORS_Timeseries.FunctionTimeseries(
        function,
        code=code,
        solutiontype="gdb",
        index=index,
        fillDays=fillDays,
        after=after,
        before=before,
        xyz0=xyz0,
    )
    return GDB_Timeseries_Calculator.StationData(code, markxyz, markdata, gdbts)
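# Example: a usage sketch for getMarkData().  The calculator instance "calc"
# and the mark code "WGTN" are hypothetical; the instance is assumed to have
# been configured with a deformation model directory and an ITRF
# transformation.
from datetime import datetime

stndata = calc.getMarkData("WGTN", after=datetime(2015, 1, 1))  # "calc", "WGTN" assumed
# getMarkData returns a StationData record bundling the mark code, the GDB
# XYZ coordinate, the GDB mark record, and the computed "gdb" time series.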
def _load(self):
    if self._loaded:
        return
    # Call the provider-specific data load.  This should return a dataframe
    # with x, y, z columns indexed on the date/time of each point.
    data = self._loadData()
    if data is None:
        raise RuntimeError("No data provided for time series")
    if self._after is not None:
        data = data[data.index > self._after]
    if self._before is not None:
        data = data[data.index < self._before]
    if self._normalize:
        data.set_index(data.index.normalize(), inplace=True)
    data.sort_index(inplace=True)
    xyz = np.vstack((data.x, data.y, data.z)).T
    if self._transform:
        xyz = np.array([self._transform(x) for x in xyz])
    data["x"] = xyz[:, 0]
    data["y"] = xyz[:, 1]
    data["z"] = xyz[:, 2]
    xyz0 = self._xyz0
    xyz0 = np.array(xyz0) if xyz0 is not None else xyz[0]
    xyzenu = self._xyzenu
    xyzenu = np.array(xyzenu) if xyzenu is not None else xyz0
    lon, lat = GRS80.geodetic(xyzenu)[:2]
    enu_axes = GRS80.enu_axes(lon, lat)
    enu = (xyz - xyz0).dot(enu_axes.T)
    data["e"] = enu[:, 0]
    data["n"] = enu[:, 1]
    data["u"] = enu[:, 2]
    self._xyz0 = xyz0
    self._data = data
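# Example: the e/n/u columns computed in _load() are just the XYZ offsets
# from the reference point rotated into a local topocentric frame.  A minimal
# sketch of that projection using the same GRS80 helpers; the coordinates are
# hypothetical.
import numpy as np
from LINZ.Geodetic.Ellipsoid import GRS80

xyz0 = np.array(GRS80.xyz(173.0, -41.0, 100.0))  # reference point
xyz = xyz0 + np.array([0.02, -0.01, 0.015])      # a nearby displaced point

# The rows of enu_axes are the east/north/up unit vectors at the reference point
lon, lat = GRS80.geodetic(xyz0)[:2]
enu_axes = GRS80.enu_axes(lon, lat)

# For a single point this matches the (xyz - xyz0).dot(enu_axes.T) step above
denu = enu_axes.dot(xyz - xyz0)
print(denu)  # east, north, up offsets in metres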
def locateInitialStations(self):
    xyz = np.array((0.0, 0.0, 0.0))
    nxyz = 0
    for s in list(self.stations.values()):
        s.setGxLocation()
        if s.xyz is not None:
            s.fixStation(s.xyz)
            xyz += s.xyz
            nxyz += 1
        else:
            self.unlocated.append(s)
    if nxyz == 0:
        raise RuntimeError("No station data or observations to locate network")
    xyz /= nxyz
    # Define a local topocentric frame at the network centroid
    lon, lat, hgt = GRS80.geodetic(xyz)
    self.enu_axes = GRS80.enu_axes(lon, lat)
    # Fix initial stations to orient horizontal angles
    for s in list(self.stations.values()):
        if s.xyz is not None:
            s.fixStation(s.xyz)
def changeSummary(self):
    dates, change, meanChange = self.calcChange()
    lon, lat, h = GRS80.geodetic(self.model.calc(dates[-1], enu=False))
    if lon < 0:
        lon += 360.0
    return dict(
        station=self.model.station,
        longitude=lon,
        latitude=lat,
        start_date=dates[0],
        end_date=dates[-1],
        dh=np.max(np.hypot(change[:, 0], change[:, 1])),
        dv=np.max(np.abs(change[:, 2])),
        end_de=change[-1, 0],
        end_dn=change[-1, 1],
        end_du=change[-1, 2],
        end_mean_de=meanChange[-1, 0],
        end_mean_dn=meanChange[-1, 1],
        end_mean_du=meanChange[-1, 2],
    )
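# Example: changeSummary() returns a plain dict per station, so summaries for
# a set of stations can be tabulated directly with pandas.  A sketch;
# "calculators" (an iterable of objects exposing changeSummary()) is assumed.
import pandas as pd

summaries = [c.changeSummary() for c in calculators]
df = pd.DataFrame(summaries).set_index("station")
# e.g. stations with more than 50 mm maximum horizontal change
print(df[df.dh > 0.05])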
def main():
    __version__ = "1.0b"
    if sys.version_info.major != 3:
        print("This program requires python 3")
        sys.exit()
    try:
        import numpy
    except ImportError:
        print("This program requires the python numpy module is installed")
        sys.exit()
    import getopt
    import re
    import csv
    from LINZ.Geodetic.Ellipsoid import GRS80
    from .Time import Time
    from .Model import Model
    from .Error import ModelDefinitionError, OutOfRangeError, UndefinedValueError

    syntax = """
    CalcDeformation.py: program to calculate deformation at a specific time and place
    using the LINZ deformation model.

    Syntax:
        python CalcDeformation.py [options] input_file output_file
        python CalcDeformation.py [options] -x longitude latitude

    Options are:
        -d date           The date at which to calculate the deformation
        --date=..         (default current date), or ":col_name"
        -b date           The reference date at which to calculate deformation.  The
        --base-date=..    default is to calculate relative to the reference coordinates
        -a                Apply deformation to update coordinates
        --apply
        -s                Subtract deformation to update coordinates
        --subtract
        -c lon:lat:h      Column names for the longitude, latitude, and height columns
        --columns=        in input_file.  Default is lon, lat, hgt (optional)
        -f format         Format of the input and output files - format can be one of
        --format=         csv (excel compatible csv), tab (tab delimited),
                          w (whitespace delimited)
        -e de:dn:du       Displacement components to calculate, any of de, dn, du,
        --elements=       eh, ev, separated by colon characters (default is de,dn,du)
        --calculate=
        -g grid           Use a grid for input rather than an input file.  The grid is
        --grid=           entered as "min_lon:min_lat:max_lon:max_lat:nlon:nlat"
        -x                Evaluate at longitude, latitude specified as arguments, rather
        --atpoint         than using input/output files
        -m dir            Model base directory (default ../model)
        --model-dir=
        -v version        Version of model to calculate (default latest version)
        --version=
        -r version        Calculate change relative to previous (base) version
        --base-version=
        -p                Calculate reverse patch corrections
        --patch
        -k                Check that the model is correctly formatted - do not do any
        --check           calculations
        -o submodel       Only calculate for specified submodel (ndm or patch directory name)
        --only=
        -n ndp            Number of decimal places to use for model values
        --ndp=
        -l                Just list details of the model (input and output files
        --list            are ignored)
        -q                Suppress optional output
        --quiet
        --cache=..        Cache options: ignore, clear, reset, use (default is use)
        --logging         Enable trace logging
    """

    def help():
        print(syntax)
        sys.exit()

    modeldir = None
    version = None
    base_version = None
    subtract = False
    reverse_patch = False
    update = False
    columns = "lon lat hgt".split()
    usercolumns = False
    date_column = None
    format = "c"
    date = None
    base_date = None
    listonly = False
    check = False
    griddef = None
    inputfile = None
    outputfile = None
    quiet = False
    atpoint = False
    ptlon = None
    ptlat = None
    ndp = 4
    calcfields = "de dn du eh ev".split()
    calculate = [0, 1, 2]
    islogging = False
    ell_a = 6378137.0
    ell_rf = 298.257222101
    ell_b = ell_a * (1 - 1.0 / ell_rf)
    ell_a2 = ell_a * ell_a
    ell_b2 = ell_b * ell_b

    if len(sys.argv) < 2:
        help()
    optlist = None
    args = None
    try:
        optlist, args = getopt.getopt(
            sys.argv[1:],
            "hd:b:uc:f:e:g:m:v:r:pn:o:kqlxas",
            [
                "help",
                "date=",
                "base-date=",
                "apply",
                "subtract",
                "columns=",
                "format=",
                "elements=",
                "grid=",
                "model-dir=",
                "version=",
                "base-version=",
                "patch",
                "check",
                "ndp=",
                "only=",
                "list",
                "quiet",
                "cache=",
                "logging",
                "atpoint",
            ],
        )
    except getopt.GetoptError:
        print(str(sys.exc_info()[1]))
        sys.exit()

    nargs = 2
    maxargs = 2
    usecache = True
    clearcache = False
    submodel = None
    for o, v in optlist:
        if o in ("-h", "--help"):
            help()
        if o in ("-l", "--list"):
            listonly = True
            nargs = 0
            maxargs = 0
        elif o in ("-k", "--check"):
            check = True
            nargs = 0
            maxargs = 0
        elif o in ("-d", "--date"):
            if v.startswith(":"):
                date_column = v[1:]
            else:
                try:
                    date = Time.Parse(v)
                except:
                    print("Invalid date " + v + " requested, must be formatted YYYY-MM-DD")
                    sys.exit()
        elif o in ("-b", "--base-date"):
            try:
                base_date = Time.Parse(v)
            except:
                print("Invalid base date " + v + " requested, must be formatted YYYY-MM-DD")
                sys.exit()
        elif o in ("-a", "--apply"):
            update = True
        elif o in ("-s", "--subtract"):
            update = True
            subtract = True
        elif o in ("-c", "--columns"):
            usercolumns = True
            columns = v.split(":")
            if len(columns) not in (2, 3, 4):
                print("Invalid columns specified - must be 2 or 3 colon separated column names")
                sys.exit()
        elif o in ("-f", "--format"):
            v = v.lower()
            if v in ("csv", "tab", "whitespace", "c", "t", "w"):
                format = v[:1]
            else:
                print("Invalid format specified, must be one of csv, tab, or whitespace")
                sys.exit()
        elif o in ("-e", "--elements"):
            cols = v.lower().split(":")
            for c in cols:
                if c not in calcfields:
                    print("Invalid calculated value " + c + " requested, must be one of " + " ".join(calcfields))
                    sys.exit()
            calculate = [i for i, c in enumerate(calcfields) if c in cols]
        elif o in ("-g", "--grid"):
            griddef = v
            nargs = 1
            maxargs = 1
        elif o in ("-x", "--atpoint"):
            atpoint = True
            nargs = 2
            maxargs = 3
        elif o in ("-m", "--model-dir"):
            modeldir = v
        elif o in ("-v", "--version"):
            m = re.match(r"^(\d{8})?(?:\-(\d{8}))?$", v)
            if not v or not m:
                print("Invalid model version " + v + " selected")
                sys.exit()
            if m.group(1):
                version = m.group(1)
                if m.group(2):
                    base_version = m.group(2)
            else:
                version = m.group(2)
                subtract = True
        elif o in ("-r", "--base-version"):
            m = re.match(r"^\d{8}$", v)
            if not m:
                print("Invalid model base version " + v + " selected")
                sys.exit()
            base_version = v
        elif o in ("-p", "--patch"):
            reverse_patch = True
        elif o in ("-q", "--quiet"):
            quiet = True
        elif o in ("-o", "--only"):
            submodel = v
        elif o in ("-n", "--ndp"):
            ndp = int(v)
        elif o == "--cache":
            if v in ("use", "clear", "ignore", "reset"):
                usecache = v in ("use", "reset")
                clearcache = v in ("clear", "reset")
            else:
                print("Invalid cache option - must be one of use, clear, reset, ignore")
                sys.exit()
        elif o == "--logging":
            import logging

            logging.basicConfig(level=logging.INFO)
            islogging = True
        else:
            print("Invalid parameter " + o + " specified")

    if len(args) > maxargs:
        print("Too many arguments specified: " + " ".join(args[nargs:]))
        sys.exit()
    elif len(args) < nargs:
        if atpoint:
            print("Require longitude and latitude coordinate")
        elif nargs - len(args) == 2:
            print("Require input and output filename arguments")
        else:
            print("Require output filename argument")
        sys.exit()

    if atpoint:
        try:
            ptlon = float(args[0])
            ptlat = float(args[1])
            pthgt = float(args[2]) if len(args) == 3 else 0.0
        except:
            print("Invalid longitude/latitude " + args[0] + " " + args[1])
            sys.exit()
    else:
        if nargs == 2:
            inputfile = args[0]
        if nargs > 0:
            outputfile = args[-1]

    if not modeldir:
        modeldir = os.environ.get("NZGD2000_DEF_MODEL")
        if not modeldir:
            from os.path import dirname, abspath, join, isdir, exists

            modeldir = join(dirname(dirname(abspath(sys.argv[0]))), "model")
            modelcsv = join(modeldir, "model.csv")
            if not isdir(modeldir) or not exists(modelcsv):
                modeldir = "model"

    # Load the model, print its description if listonly is requested
    model = None
    # Use a loop to make exiting easy...
    try:
        for loop in [1]:
            try:
                model = Model(
                    modeldir,
                    loadAll=check,
                    useCache=usecache,
                    clearCache=clearcache,
                    loadSubmodel=submodel,
                )
            except ModelDefinitionError:
                print("Error loading model:")
                print(str(sys.exc_info()[1]))
                break
            if check:
                print("The deformation model is correctly formatted")
                break

            # Set the model version
            if version is None:
                version = model.currentVersion()
            if version not in model.versions():
                print("{0} is not a valid version of the deformation model".format(version))
                break
            if reverse_patch:
                if base_version is None:
                    versions = model.versions()
                    base_version = versions[versions.index(version) - 1]
                model.setVersion(base_version, version)
                if date is None and date_column is None:
                    date = model.datumEpoch()
                else:
                    if not quiet:
                        print("Using a date or date column with a patch option - are you sure?")
            else:
                if date is None and date_column is None:
                    date = Time.Now()
                model.setVersion(version, base_version)
            if listonly:
                print(model.description())
                break

            # Determine the source for input
            reader = None
            headers = None
            colnos = None
            date_colno = None
            ncols = 2
            dialect = csv.excel_tab if format == "t" else csv.excel
            if atpoint:
                pass
            elif griddef:
                # Grid format
                try:
                    parts = griddef.split(":")
                    if len(parts) != 6:
                        raise ValueError("")
                    min_lon = float(parts[0])
                    min_lat = float(parts[1])
                    max_lon = float(parts[2])
                    max_lat = float(parts[3])
                    nlon = int(parts[4])
                    nlat = int(parts[5])
                    if max_lon <= min_lon or max_lat <= min_lat or nlon < 2 or nlat < 2:
                        raise ValueError("")
                    dlon = (max_lon - min_lon) / (nlon - 1)
                    dlat = (max_lat - min_lat) / (nlat - 1)

                    def readf():
                        lat = min_lat - dlat
                        for ilat in range(nlat):
                            lat += dlat
                            lon = min_lon - dlon
                            for ilon in range(nlon):
                                lon += dlon
                                yield [str(lon), str(lat)]

                    reader = readf
                except:
                    print("Invalid grid definition", griddef)
                    break
                colnos = [0, 1]
                headers = columns[0:2]
            else:
                try:
                    instream = sys.stdin if inputfile == "-" else open(inputfile, "r")
                except:
                    print("Cannot open input file " + inputfile)
                    break
                # Whitespace delimited input
                if format == "w":
                    headers = instream.readline().strip().split()

                    def readf():
                        for line in instream:
                            yield line.strip().split()

                    reader = readf
                # CSV format
                else:
                    csvrdr = csv.reader(instream, dialect=dialect)
                    headers = next(csvrdr)

                    def readf():
                        for r in csvrdr:
                            yield r

                    reader = readf

                ncols = len(headers)
                colnos = []
                columnsvalid = True
                if len(columns) > 3:
                    date_column = date_column or columns[3]
                    if date_column != columns[3]:
                        print("Inconsistent names specified for date column")
                        break
                    columns = columns[:3]
                if len(columns) > 2 and columns[2] == "":
                    columns = columns[:2]
                for c in columns:
                    if c in headers:
                        colnos.append(headers.index(c))
                    elif c == "hgt" and not usercolumns:
                        break
                    else:
                        print("Column", c, "missing in", inputfile)
                        columnsvalid = False
                if not columnsvalid:
                    break
                if date_column:
                    if date_column in headers:
                        date_colno = headers.index(date_column)
                    else:
                        print("Column", date_column, "missing in", inputfile)
                        break

            # Create the output file
            if not quiet:
                action = "Updating with" if update else "Calculating"
                value = "patch correction" if reverse_patch else "deformation"
                vsnopt = (
                    "between versions " + base_version + " and " + version
                    if base_version
                    else "for version " + version
                )
                datopt = "the date in column " + date_column if date_column else str(date)
                if base_date:
                    datopt = "between " + str(base_date) + " and " + datopt
                else:
                    datopt = "at " + datopt
                print("Deformation model " + model.name())
                print("for datum " + model.datumName())
                print(action + " " + value + " " + vsnopt + " " + datopt)

            if atpoint:
                defm = model.calcDeformation(ptlon, ptlat, date, base_date)
                if subtract:
                    for i in range(3):
                        defm[i] = -defm[i]
                if update:
                    dedln, dndlt = GRS80.metres_per_degree(ptlon, ptlat)
                    ptlon += defm[0] / dedln
                    ptlat += defm[1] / dndlt
                    pthgt += defm[2]
                    print("{0:.9f} {1:.9f} {2:.4f}".format(ptlon, ptlat, pthgt))
                elif quiet:
                    print("{0:.{3}f} {1:.{3}f} {2:.{3}f}".format(defm[0], defm[1], defm[2], ndp))
                else:
                    print(
                        "Deformation at {0:.6f} {1:.6f}: {2:.{5}f} {3:.{5}f} {4:.{5}f}".format(
                            ptlon, ptlat, defm[0], defm[1], defm[2], ndp
                        )
                    )
                break

            try:
                outstream = sys.stdout if outputfile == "-" else open(outputfile, "w")
            except:
                print("Cannot open output file", outputfile)
                break

            if not update:
                for c in calculate:
                    headers.append(calcfields[c])
            writefunc = None
            if format == "w":

                def writef(cols):
                    outstream.write(" ".join(cols))
                    outstream.write("\n")

                writefunc = writef
            else:
                csvwrt = csv.writer(outstream, dialect=dialect)
                writefunc = csvwrt.writerow
            writefunc(headers)

            latcalc = None
            dedln = None
            dndlt = None
            nerror = 0
            nrngerr = 0
            nmissing = 0
            ncalc = 0
            nrec = 0
            for data in reader():
                nrec += 1
                if len(data) < ncols:
                    if not quiet:
                        print("Skipping record", nrec, "as too few columns")
                    continue
                else:
                    data = data[:ncols]
                try:
                    lon = float(data[colnos[0]])
                    lat = float(data[colnos[1]])
                    if date_colno is not None:
                        date = data[date_colno]
                    defm = model.calcDeformation(lon, lat, date, base_date)
                    if subtract:
                        for i in range(3):
                            defm[i] = -defm[i]
                    if update:
                        dedln, dndlt = GRS80.metres_per_degree(lon, lat)
                        lon += defm[0] / dedln
                        lat += defm[1] / dndlt
                        data[colnos[0]] = "{0:.9f}".format(lon)
                        data[colnos[1]] = "{0:.9f}".format(lat)
                        if len(colnos) > 2:
                            hgt = float(data[colnos[2]])
                            hgt += defm[2]
                            data[colnos[2]] = "{0:.4f}".format(hgt)
                    else:
                        for c in calculate:
                            data.append("{0:.{1}f}".format(defm[c], ndp))
                    writefunc(data)
                    ncalc += 1
                except OutOfRangeError:
                    nrngerr += 1
                except UndefinedValueError:
                    nmissing += 1
                except:
                    print(str(sys.exc_info()[1]))
                    nerror += 1

            if not quiet:
                print(ncalc, "deformation values calculated")
            if nrngerr > 0:
                print(nrngerr, "points were outside the valid range of the model")
            if nmissing > 0:
                print(nmissing, "deformation values were undefined in the model")
    except:
        errmsg = str(sys.exc_info()[1])
        print(errmsg)
        if islogging:
            logging.info(errmsg)
        raise
    if model:
        model.close()
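# Example: the per-point calculation in the record loop above can be driven
# directly from Python.  A minimal sketch; the absolute import paths are
# assumptions (the script itself uses relative imports) and "model" is a
# hypothetical model directory.
from LINZ.DeformationModel.Model import Model  # import path assumed
from LINZ.DeformationModel.Time import Time    # import path assumed

model = Model("model")
date = Time.Parse("2020-01-01")
# de, dn, du at a point, as model.calcDeformation() is called in the loop above
defm = model.calcDeformation(172.5, -43.5, date, None)
print(defm)
model.close()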
def main():
    parser = argparse.ArgumentParser(
        description="""
    Calculate the corrections to convert an ITRF coordinate to or from an
    NZGD2000 coordinate on a longitude/latitude grid.  The corrections are
    generated in a comma separated (CSV) file."""
    )
    parser.add_argument("min_lon", type=float, help="Minimum longitude")
    parser.add_argument("min_lat", type=float, help="Minimum latitude")
    parser.add_argument("max_lon", type=float, help="Maximum longitude")
    parser.add_argument("max_lat", type=float, help="Maximum latitude")
    parser.add_argument("nlon", type=float, help="Number of longitude values")
    parser.add_argument("nlat", type=float, help="Number of latitude values")
    parser.add_argument("output_file", type=str, help="Output file name")
    parser.add_argument(
        "-d", "--date", type=str, default="now",
        help="Date of transformation (default current date)",
    )
    parser.add_argument(
        "-i", "--itrf", type=str, default="ITRF2008",
        help="ITRF reference frame version (default ITRF2008)",
    )
    parser.add_argument(
        "-m", "--model-dir", type=str, default="../model",
        help="Deformation model base directory (default ../model)",
    )
    parser.add_argument(
        "-v", "--version", type=str,
        help="Version of deformation model to use (default current version)",
    )
    parser.add_argument(
        "-t", "--type", choices=("llh", "enu", "xyz"), default="llh",
        help="Type of correction to calculate",
    )
    parser.add_argument(
        "-s", "--size", action="store_true",
        help="Use grid cell size (dlon,dlat) instead of number of values (nlon,nlat)",
    )
    parser.add_argument(
        "-r", "--reverse", action="store_true",
        help="Generate grid to convert NZGD2000 to ITRF",
    )
    parser.add_argument(
        "-o", "--order", choices="es en ws wn se sw ne nw".split(), default="es",
        help="Grid order (start corner and first axis to increment)",
    )
    parser.add_argument("-q", "--quiet", action="store_true", help="Suppress output")
    parser.add_argument(
        "--cache", choices=("ignore", "clear", "reset", "use"), default="use",
        help="Deformation model cache option (requires pytables)",
    )
    parser.add_argument("--logging", action="store_true", help="Enable trace logging")

    args = parser.parse_args()
    modeldir = args.model_dir
    version = args.version
    date = args.date
    try:
        date = Time.Parse(args.date)
    except:
        print("Invalid date " + args.date + " requested, must be formatted YYYY-MM-DD")
        sys.exit()
    outputfile = args.output_file
    itrf = args.itrf
    increment = args.size
    order = args.order
    reverse = args.reverse
    corrtype = args.type
    quiet = args.quiet
    usecache = args.cache in ("use", "reset")
    clearcache = args.cache in ("clear", "reset")
    if args.logging:
        logging.basicConfig(level=logging.INFO)

    if not modeldir:
        modeldir = os.environ.get("NZGD2000_DEF_MODEL")
        if not modeldir:
            from os.path import dirname, abspath, join, isdir, exists

            modeldir = join(dirname(dirname(abspath(sys.argv[0]))), "model")
            modelcsv = join(modeldir, "model.csv")
            if not isdir(modeldir) or not exists(modelcsv):
                modeldir = "model"

    # Use a loop to make exiting easy...
    for loop in [1]:
        # Setup the transformation
        transform = None
        try:
            transform = ITRF_NZGD2000.Transformation(
                itrf,
                toNZGD2000=not args.reverse,
                modeldir=modeldir,
                version=version,
                usecache=usecache,
                clearcache=clearcache,
            )
        except ModelDefinitionError:
            print("Error loading model:")
            print(str(sys.exc_info()[1]))
            break
        except RuntimeError:
            print(str(sys.exc_info()[1]))
            break

        # Determine the source for input
        coords = []
        try:
            min_lon = args.min_lon
            min_lat = args.min_lat
            max_lon = args.max_lon
            max_lat = args.max_lat
            if max_lon <= min_lon or max_lat <= min_lat:
                raise ValueError("Minimum latitude or longitude larger than maximum")
            lonval = None
            latval = None
            if increment:
                dlon = args.nlon
                dlat = args.nlat
                if dlon <= 0 or dlat <= 0:
                    raise ValueError("Longitude and latitude increments must be positive")
                lonval = np.arange(min_lon, max_lon + dlon * 0.99, dlon)
                latval = np.arange(min_lat, max_lat + dlat * 0.99, dlat)
            else:
                nlon = int(args.nlon)
                nlat = int(args.nlat)
                if nlon < 2 or nlat < 2:
                    raise ValueError("Must be at least two longitude and latitude values")
                lonval = np.linspace(min_lon, max_lon, nlon)
                latval = np.linspace(min_lat, max_lat, nlat)
            if "w" in order:
                lonval = lonval[::-1]
            if "n" in order:
                latval = latval[::-1]
            if order[0] in "ew":
                for lat in latval:
                    for lon in lonval:
                        coords.append((lon, lat))
            else:
                for lon in lonval:
                    for lat in latval:
                        coords.append((lon, lat))
        except ValueError as e:
            print("Invalid grid definition: " + str(e))
            break

        # Create the output file
        if not quiet:
            if reverse:
                print("Calculating NZGD2000 to " + itrf + " corrections at " + str(date))
            else:
                print("Calculating " + itrf + " to NZGD2000 corrections at " + str(date))
            print("Deformation model " + transform.model.name() +
                  " version " + transform.version)
        try:
            outstream = open(outputfile, "w")
        except:
            print("Cannot open output file", outputfile)
            break
        if corrtype == "llh":
            outstream.write("lon,lat,dlon,dlat,dhgt\n")
        elif corrtype == "enu":
            outstream.write("lon,lat,de,dn,dh\n")
        elif corrtype == "xyz":
            outstream.write("lon,lat,dx,dy,dz\n")

        ncalc = 0
        nerror = 0
        nrngerr = 0
        nmissing = 0
        hgt = 0.0
        rvs = -1.0 if reverse else 1.0
        for lon, lat in coords:
            try:
                llh = transform(lon, lat, hgt, date)
                if corrtype == "llh":
                    outstream.write(
                        "{0:.5f},{1:.5f},{2:.9f},{3:.9f},{4:.4f}\n".format(
                            lon, lat,
                            rvs * (llh[0] - lon),
                            rvs * (llh[1] - lat),
                            rvs * llh[2],
                        )
                    )
                elif corrtype == "enu":
                    dedln, dndlt = GRS80.metres_per_degree(lon, lat)
                    outstream.write(
                        "{0:.5f},{1:.5f},{2:.4f},{3:.4f},{4:.4f}\n".format(
                            lon, lat,
                            rvs * dedln * (llh[0] - lon),
                            rvs * dndlt * (llh[1] - lat),
                            rvs * llh[2],
                        )
                    )
                elif corrtype == "xyz":
                    xyz0 = GRS80.xyz(lon, lat, hgt)
                    xyz1 = GRS80.xyz(llh[0], llh[1], llh[2])
                    outstream.write(
                        "{0:.5f},{1:.5f},{2:.4f},{3:.4f},{4:.4f}\n".format(
                            lon, lat,
                            rvs * (xyz1[0] - xyz0[0]),
                            rvs * (xyz1[1] - xyz0[1]),
                            rvs * (xyz1[2] - xyz0[2]),
                        )
                    )
                ncalc += 1
            except OutOfRangeError:
                nrngerr += 1
            except UndefinedValueError:
                nmissing += 1
            except:
                print(str(sys.exc_info()[1]))
                nerror += 1
        outstream.close()
        if not quiet:
            print("{0} corrections calculated".format(ncalc))
        if nrngerr > 0:
            print("{0} points were outside the valid range of the model".format(nrngerr))
        if nmissing > 0:
            print("{0} deformation values were undefined in the model".format(nmissing))
def compare(codes=None, codesCoordFile=None, useCode=False,
            velocities=False, skipError=False, **files):
    '''
    Compare two or more Bernese coordinate files, and return a pandas
    DataFrame of common codes.

    File names or coordinate data are entered as key/value pairs where the
    key is used to identify the data in the DataFrame (ie coordinate columns
    are key_X, key_Y, key_Z).

    Can take a list of codes to include in the comparison as either a list
    [codes], a string [codes], or a Bernese coordinate file containing the
    codes [codesCoordFile].

    If just two files are compared then the differences are included in the
    data frame.
    '''
    coords = {}
    usecodes = None
    nfiles = 0
    for f in files:
        nfiles += 1
        crddata = files[f]
        if isinstance(crddata, str):
            crddata = read(crddata, useCode=useCode, skipError=skipError,
                           velocities=velocities)
        coords[f] = crddata
        fcodes = set(crddata)
        if usecodes is None:
            usecodes = fcodes
        else:
            usecodes = usecodes.intersection(fcodes)
    if nfiles == 0:
        raise RuntimeError("No files specified in CoordFile.Compare")
    if usecodes is None or len(usecodes) == 0:
        raise RuntimeError("No common codes to compare in CoordFile.Compare")
    if codes is not None:
        if isinstance(codes, str):
            codes = codes.split()
        usecodes = usecodes.intersection(set(codes))
    if codesCoordFile is not None:
        cfcodes = read(codesCoordFile, useCode=useCode, skipError=skipError)
        usecodes = usecodes.intersection(set(cfcodes))
    if len(usecodes) == 0:
        raise RuntimeError("No common codes selected in CoordFile.Compare")
    data = []
    usecodes = sorted(usecodes)
    crdtypes = sorted(coords)
    calcdiff = len(crdtypes) == 2
    for code in usecodes:
        codedata = [code]
        cdata = [coords[t][code] for t in crdtypes]
        lon, lat, hgt = GRS80.geodetic(cdata[0].xyz)
        if lon < 0:
            lon += 360.0
        codedata.extend((lon, lat, hgt))
        codedata.extend((c.flag for c in cdata))
        for c in cdata:
            codedata.extend(c.xyz)
            if velocities:
                codedata.extend(c.vxyz or [np.nan, np.nan, np.nan])
        if calcdiff:
            xyz0 = np.array(cdata[0].xyz)
            xyz1 = np.array(cdata[1].xyz)
            dxyz = xyz1 - xyz0
            denu = GRS80.enu_axes(lon, lat).dot(dxyz)
            codedata.extend(dxyz)
            codedata.extend(denu)
            codedata.append(la.norm(denu))
            if velocities:
                xyz0 = np.array(cdata[0].vxyz or [np.nan, np.nan, np.nan])
                xyz1 = np.array(cdata[1].vxyz or [np.nan, np.nan, np.nan])
                dxyz = xyz1 - xyz0
                denu = GRS80.enu_axes(lon, lat).dot(dxyz)
                codedata.extend(dxyz)
                codedata.extend(denu)
                codedata.append(la.norm(denu))
        data.append(codedata)
    columns = ['code', 'lon', 'lat', 'hgt']
    columns.extend((t + '_flg' for t in crdtypes))
    for t in crdtypes:
        columns.extend((t + '_X', t + '_Y', t + '_Z'))
        if velocities:
            columns.extend((t + '_VX', t + '_VY', t + '_VZ'))
    if calcdiff:
        columns.extend(('diff_X', 'diff_Y', 'diff_Z',
                        'diff_E', 'diff_N', 'diff_U', 'offset'))
        if velocities:
            columns.extend(('diff_VX', 'diff_VY', 'diff_VZ',
                            'diff_VE', 'diff_VN', 'diff_VU', 'offsetV'))
    df = pd.DataFrame(data, columns=columns)
    df.set_index(df.code, inplace=True)
    return df
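# Example: a usage sketch for compare().  The file names and dataset keys
# ("final", "rapid") are hypothetical; compare() reads Bernese coordinate
# files via this module's read() function.
df = compare(
    codes="AUCK CHTI DUND WGTN",
    final="FINAL.CRD",
    rapid="RAPID.CRD",
)
# With exactly two inputs the difference columns are present, so large
# offsets can be listed directly:
print(df[df.offset > 0.01][["diff_E", "diff_N", "diff_U", "offset"]])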
clists = []
for infile in (args.infile1, args.infile2):
    if not os.path.exists(infile):
        print("Input file {0} missing".format(infile))
        sys.exit()
    coords = []
    with open(infile) as ifh:
        for l in ifh:
            p = l.split()
            if len(p) >= 2:
                coords.append([float(x) for x in p[:2]])
    clists.append(coords)

maxdiff = 0
npt = 0
with open(args.outfile, 'w') as of:
    for c1, c2 in zip(clists[0], clists[1]):
        dedln, dndlt = GRS80.metres_per_degree(*c1)
        de = (c2[0] - c1[0]) * dedln
        dn = (c2[1] - c1[1]) * dndlt
        diff = math.sqrt(de * de + dn * dn)
        if diff > maxdiff:
            maxdiff = diff
        npt += 1
        of.write("{0:.8f} {1:.8f} {2:.8f} {3:.8f} {4:7.3f} {5:7.3f}\n".format(
            c1[0], c1[1], c2[0], c2[1], de, dn))

if args.summary_id:
    print("{0} {1} test points max diff {2:.3f}m".format(
        args.summary_id, npt, maxdiff))
def tryFixAngle(self):
    tried = []
    unfixedAngles = self.unfixedAngles()
    fixedStations = self.getFixedStations()
    success = False
    write = self.write
    savedStatus = None
    # self.write = lambda x: None
    write("\nTrying fixing one angle\n")
    try:
        while len(unfixedAngles) > 0:
            # Find an angle to fix.  Require that it connects to at least
            # one fixed station, which means that it is observed at an
            # unfixed station (otherwise its orientation would be defined).
            maxmatched = 0
            trialset = None
            pivotstation = None
            for haset in unfixedAngles:
                hafixed = [s for s in haset.stations if s in fixedStations]
                countfixed = len(hafixed)
                if countfixed > maxmatched:
                    maxmatched = countfixed
                    pivotstation = hafixed[0]
                    trialset = haset
            if trialset is None:
                break
            # Try fixing one angle.  Save the status, then set the
            # orientation of the potential angle set, then refix the pivot
            # station (pivotstation).  This should calculate the coordinate
            # of the observation station for the angles.
            pivotxyz = pivotstation.xyz
            savedStatus = self.resetStatus()
            trialset.setOrientation(0.0)
            trialstation = trialset.instStation
            write("\nFixing set at {0} connected to {1} stations\n".format(
                trialstation.code, maxmatched))
            self.fixStation(pivotstation, pivotxyz)
            trialxyz = trialstation.estimatedLocation()
            if trialxyz is None:
                write("  Cannot determine pivoted coordinate for {0}\n".format(
                    trialstation.code))
                self.resetStatus(savedStatus)
                unfixedAngles.remove(trialset)
                continue
            # Reset the status and add the fixed orientation and coordinate
            self.resetStatus()
            trialset.setOrientation(0.0)
            self.fixStation(trialstation, trialxyz)
            # Fix as many stations as we can, but exclude chaining from
            # previously fixed stations - we just want to try and fix new
            # stations
            nunlocated = len(self.unlocated)
            nunlocated0 = nunlocated
            while self.tryLocateStation(exclude=fixedStations):
                if len(self.unlocated) >= nunlocated:
                    break
                nunlocated = len(self.unlocated)
                if nunlocated == 0:
                    break
            # See if we have enough common stations with the original fix
            # to calculate the orientation of trialset
            newfixed = self.getFixedStations()
            commonStations = [s for s in newfixed if s in fixedStations]
            write("Fixed angle connects {0} stations - {1} have known coordinates\n".format(
                len(newfixed), len(commonStations)))
            # If only one common station then we cannot orient the new
            # stations...  Remove unfixed angle sets and try again.
            if len(commonStations) < 2:
                len0 = len(unfixedAngles)
                stillUnfixed = set()
                for haset in unfixedAngles:
                    if not haset.defined:
                        stillUnfixed.add(haset)
                unfixedAngles = stillUnfixed
                len1 = len(unfixedAngles)
                if len1 >= len0:
                    raise RuntimeError("Failed to remove unfixedAngles in tryFixAngle")
                write("Failed to use fixed angle\n")
                self.resetStatus(savedStatus)
                continue
            # Record the updated positions
            self.ntrialangle += 1
            if self.ntrialangle == 1:
                write("stnloccsv,code,trialid,pivot,lon,lat,hgt\n")
            for s in newfixed:
                pivot = ("pivot" if s == pivotstation
                         else "match" if s in commonStations
                         else "new")
                llh = GRS80.geodetic(s.xyz)
                write('stnloccsv,"{0}",{1},{2},{3:.8f},{4:.8f},{5:.4f}\n'.format(
                    s.code, self.ntrialangle, pivot, llh[0], llh[1], llh[2]))
            # Find the rotation to apply to the new stations...
            enu = pivotstation.enu_axes
            angleref = None
            sumangle = 0.0
            sumds = 0.0
            for s in commonStations:
                if s == pivotstation:
                    continue
                denu0 = enu.dot(fixedStations[s] - pivotxyz)
                denu1 = enu.dot(newfixed[s] - pivotxyz)
                angle0 = math.atan2(denu0[0], denu0[1])
                angle1 = math.atan2(denu1[0], denu1[1])
                angdif = angle1 - angle0
                if angleref is None:
                    angleref = angdif
                angdif -= angleref
                while angdif < -math.pi:
                    angdif += math.pi * 2
                while angdif > math.pi:
                    angdif -= math.pi * 2
                angdif += angleref
                ds = math.hypot(denu1[0], denu1[1])
                sumangle += angdif * ds
                sumds += ds
            angdif = sumangle / sumds
            # Now restore the fixed stations, reset the fixed angle, refix
            # the pivot station, and recalculate stations using the correct
            # orientation at the trialset
            self.resetStatus(savedStatus)
            trialset.setOrientation(math.degrees(-angdif))
            self.fixStation(pivotstation, pivotxyz)
            self.write = write
            nunlocated = len(self.unlocated)
            nunlocated0 = nunlocated
            success = False
            while self.tryLocateStation():
                if len(self.unlocated) >= nunlocated:
                    break
                nunlocated = len(self.unlocated)
                success = True
                if nunlocated == 0:
                    break
            nunlocated = len(self.unlocated)
            if nunlocated >= nunlocated0:
                raise RuntimeError("Failed to fix a station in tryFixAngle")
            self.write("Successfully fixed {0} stations using fixed angle\n".format(
                nunlocated0 - nunlocated))
            break
        if not success:
            if savedStatus is not None:
                self.resetStatus(savedStatus)
            write("Restored {0} original fixed stations\n".format(
                len(fixedStations)))
    finally:
        self.write = write
    return success
def setXyzOffset(self, denu, comment=None):
    denu = np.array(denu)
    xyz = self.xyz0 + self.enu_axes.T.dot(denu)
    self.lon, self.lat, self.hgt = GRS80.geodetic(xyz)
    if comment is not None:
        self.comment = comment
def fixStation(self, xyz, exclude=None):
    if self.xyz is None or True:
        self.locator.write("Fixing station {0}\n".format(self.code))
        self.setXyz(xyz)
    # Fix any HA observation sets that can be defined
    hafixstations = set()
    # Don't calculate orientations at excluded stations
    exclude = exclude or {}
    for tostn in self.obs:
        if tostn is None or tostn.xyz is None:
            continue
        obs = self.obs[tostn]
        if "HA" in obs:
            for haobs in obs["HA"]:
                if haobs.obsset.defined:
                    continue
                if haobs.obsset.instStation in exclude:
                    continue
                stnfrom = self.station()
                stnto = tostn.station()
                if haobs.reverse:
                    stnfrom, stnto = stnto, stnfrom
                az = stnfrom.azimuthTo(stnto)
                fromcode = stnfrom.code()
                self.locator.write(
                    "Fixing orientation of HA observations {1} at {0}\n".format(
                        fromcode, haobs.obsset.id))
                referredcodes = [
                    stn.code
                    for stn in haobs.obsset.stations
                    if stn.code != fromcode
                ]
                self.locator.write("  Provides azimuths to {0}\n".format(
                    ", ".join(referredcodes)))
                haobs.obsset.setOrientation(az - haobs.obsvalue.value)
                if haobs.reverse:
                    hafixstations.add(tostn)
    # Now work out which coordinates can be calculated.  Don't calculate
    # trial coordinates based on joins between excluded sets.
    if self not in exclude:
        exclude = {}
    for tostn in self.obs:
        if tostn is None or tostn.xyz is not None:
            continue
        # self.locator.write("Attempting to coordinate {0}\n".format(tostn.code))
        obs = self.obs[tostn]
        if "SD" not in obs and "HD" not in obs:
            self.locator.write("  Cannot fix {0} - no distance\n".format(tostn.code))
            continue
        # Work out an azimuth
        azimuths = []
        if "AZ" in obs:
            azimuths = [
                o.obsvalue.value + (180.0 if o.reverse else 0)
                for o in obs["AZ"]
            ]
        if "HA" in obs:
            for o in obs["HA"]:
                if not o.obsset.defined:
                    continue
                az = o.obsvalue.value + o.obsset.orientation
                if o.reverse:
                    az += 180.0
                azimuths.append(az)
        # If no horizontal angles are defined yet, then we can't set a
        # trial coordinate yet
        if len(azimuths) == 0:
            self.locator.write("  Cannot fix {0} - no azimuth\n".format(tostn.code))
            continue
        azimuths = np.array(azimuths)
        azimuths *= math.radians(1.0)
        az0 = azimuths[0]
        azimuths = (np.remainder(azimuths - az0 + math.pi, 2 * math.pi)
                    + az0 - math.pi)
        azimuth = np.mean(azimuths)
        llh0 = GRS80.geodetic(self.xyz)
        hgtdiff = 0.0
        if "LV" in obs:
            hgtdiff = np.mean([o.obsvalue.value for o in obs["LV"]])
        # Only use ZD if we don't have a levelled height difference
        elif "ZD" in obs:
            slope = True
            distance = 0.0
            if "SD" in obs:
                distance = np.mean([o.obsvalue.value for o in obs["SD"]])
                slope = True
            else:
                distance = np.mean([o.obsvalue.value for o in obs["HD"]])
                slope = False
            hgtdiffs = []
            for o in obs["ZD"]:
                angle = math.radians(o.obsvalue.value)
                sd = math.sin(angle) * distance if slope else distance
                corr = sd / (2.0 * (llh0[2] + MeanEarthRadius))
                angle -= corr
                hd = math.cos(angle) * distance
                hd += o.obsvalue.insthgt
                hd -= o.obsvalue.trgthgt
                if o.reverse:
                    hd = -hd
                hgtdiffs.append(hd)
            hgtdiff = np.mean(hgtdiffs)
        if hgtdiff is None:
            self.locator.write("  Cannot fix {0} - no height data\n".format(tostn.code))
            continue
        hordists = [o.obsvalue.value for o in obs.get("HD", [])]
        for o in obs.get("SD", []):
            vdist = hgtdiff
            if o.reverse:
                vdist = -vdist
            vdist += o.obsvalue.trgthgt - o.obsvalue.insthgt
            hdist = o.obsvalue.value * o.obsvalue.value - vdist * vdist
            if hdist > 0.0:
                hordists.append(math.sqrt(hdist))
            else:
                hordists.append(0.0)
        hordist = np.mean(hordists)
        denu = np.array((hordist * math.sin(azimuth),
                         hordist * math.cos(azimuth),
                         hgtdiff))
        dxyz = self.enu_axes.T.dot(denu)
        txyz = self.xyz + dxyz
        llh1 = GRS80.geodetic(txyz)
        llh1[2] = llh0[2] + hgtdiff
        txyz = GRS80.xyz(llh1)
        tostn.addTrialXyz(self, txyz)
        self.locator.write("  Trial coordinate determined for {0}: {1}\n".format(
            tostn.code, str(txyz)))
    # Finally refix any stations
    for stn in hafixstations:
        stn.fixStation(stn.xyz, exclude)
def setXyz(self, xyz):
    self.xyz = np.array(xyz)
    lon, lat, hgt = GRS80.geodetic(xyz)
    self.enu_axes = GRS80.enu_axes(lon, lat)
def main():
    calcDate = datetime(date.today().year, 1, 1)
    parser = argparse.ArgumentParser(_description, epilog=_epilog)
    parser.add_argument(
        "gdb_file",
        default="gdb_coords.txt",
        help="Name of gdb coordinate input file, or 'gdb' to use the online database, or 'none' to not compare with gdb",
    )
    parser.add_argument(
        "check_file",
        nargs="?",
        default="coord_differences.csv",
        help="Output csv file of coordinate comparisons",
    )
    parser.add_argument(
        "-u", "--update-csv-file",
        help="Name of gdb coordinate upload file (default none)",
    )
    parser.add_argument(
        "-x", "--snap-gx-file", help="Name of snap data file (default none)"
    )
    parser.add_argument(
        "-c", "--snap-crd-file", help="Name of snap coordinate file (default none)"
    )
    parser.add_argument(
        "-f", "--def-dir", help="Deformation model base dir (contains tools and model)"
    )
    parser.add_argument(
        "-m", "--model-dir", help="Directory containing deformation model"
    )
    parser.add_argument(
        "-s", "--stn-dir",
        default="stations",
        help="Directory containing SPM xml definitions",
    )
    parser.add_argument(
        "-d", "--calc-date",
        help="Calculation date for coordinates (default "
        + calcDate.strftime("%Y-%m-%d") + ")",
    )
    parser.add_argument(
        "-a", "--all-marks",
        action="store_true",
        help="Output coord information for all marks including those not in gdb",
    )
    parser.add_argument(
        "-e", "--extend-dates",
        action="store_true",
        help="Extrapolate models before first or after last dates",
    )
    parser.add_argument(
        "-i", "--itrf-only",
        action="store_true",
        help="Calculate ITRF coordinates but not GDB",
    )
    parser.add_argument(
        "--snap-gx-error",
        nargs=2,
        type=float,
        default=default_gx_error,
        help="Snap horizontal and vertical mm error",
    )
    parser.add_argument(
        "--snap-gx-refframe",
        default=model_itrf,
        help="Snap GX observation reference frame code",
    )
    parser.add_argument(
        "--snap-crd-use-itrf",
        action="store_true",
        help="Use ITRF coordinates in the snap coordinate file",
    )
    args = parser.parse_args()
    gdbfile = args.gdb_file
    chkfile = args.check_file
    updfile = args.update_csv_file
    snapgxfile = args.snap_gx_file
    snapgxrf = args.snap_gx_refframe
    snapcrdfile = args.snap_crd_file
    stndir = args.stn_dir
    defdir = args.def_dir
    allmarks = args.all_marks
    alldates = args.extend_dates
    itrfonly = args.itrf_only
    gx_error = args.snap_gx_error
    crd_itrf = args.snap_crd_use_itrf
    mdldir = args.model_dir or (defdir + "/model" if defdir else "model")
    if args.calc_date is not None:
        calcDate = datetime.strptime(args.calc_date, "%Y-%m-%d")
    if args.def_dir:
        sys.path.append(defdir + "/tools/LINZ")

    from LINZ.DeformationModel import Model as DefModel
    from LINZ.Geodetic.Ellipsoid import GRS80
    from LINZ.Geodetic.ITRF import Transformation

    itrf_tfm = Transformation(from_itrf=model_itrf, to_itrf="ITRF96")
    calcgdb = not itrfonly
    if gdbfile == "gdb":
        from LINZ.Geodetic import GDB

        GDB.setCached()

        def gdbcrds(code):
            try:
                markdata = GDB.get(code)
                coord = markdata.coordinate
                return [coord.longitude, coord.latitude, coord.height]
            except:
                return None

    elif gdbfile == "none":
        gdbcrds = lambda code: None
        allmarks = True
        calcgdb = False
    else:
        markcrds = {}
        with open(gdbfile, "r") as gdbf:
            l = gdbf.readline()
            l = l.lower().replace('"', "").replace(",", " ")
            if l.split() == "code gdb_lon gdb_lat gdb_hgt".split():
                crdorder = (1, 2, 3)
            elif l.split() == "geodeticcode lat lon ellhgt".split():
                crdorder = (2, 1, 3)
            else:
                raise RuntimeError("Invalid fields in " + gdbfile)
            for l in gdbf:
                l = l.lower().replace('"', "").replace(",", " ")
                try:
                    parts = l.split()
                    crds = [float(parts[x]) for x in crdorder]
                    markcrds[parts[0].upper()] = crds
                except:
                    pass
        gdbcrds = lambda code: markcrds.get(code)

    needgdb = False
    neednz2k = False
    csv = None
    if chkfile and chkfile != "none":
        csv = open(chkfile, "w")
        itrfc = model_itrf.lower()
        columns = [
            "code",
            "scm_version",
            "deformation_version",
            "calc_date",
            itrfc + "_X",
            itrfc + "_Y",
            itrfc + "_Z",
            itrfc + "_lon",
            itrfc + "_lat",
            itrfc + "_hgt",
        ]
        if not itrfonly:
            neednz2k = True
            columns.extend(
                [
                    "itrf96_lon",
                    "itrf96_lat",
                    "itrf96_hgt",
                    "nzgd2000_lon",
                    "nzgd2000_lat",
                    "nzgd2000_hgt",
                ]
            )
        if calcgdb:
            needgdb = True
            columns.extend(
                ["gdb_lon", "gdb_lat", "gdb_hgt", "e_diff", "n_diff", "h_diff"]
            )
        csv.write(",".join(columns))
        csv.write("\n")

    csvu = None
    logf = None
    if updfile:
        needgdb = True
        neednz2k = True
        csvu = open(updfile, "w")
        csvu.write("CODE,LAT,LON,EHGT,ROBG,COSY,DATE,COMM\n")
        updcomment = "SPM version {0:%Y-%m-%d %H:%M:%S}, Deformation model version {1}, Calc date {2:%Y-%m-%d %H:%M:%S}"
        logfile = updfile + ".log"
        logf = open(logfile, "w")

    datf = None
    if snapgxfile:
        datf = open(snapgxfile, "w")
        datf.write("CORS reference stations coordinates\n")
        datf.write("\n")
        datf.write("#date {0:%Y-%m-%d}\n".format(calcDate))
        datf.write("#gx_enu_error {0} {0} {1} mm\n".format(*gx_error))
        datf.write("#reference_frame {0}\n".format(snapgxrf))
        datf.write("#classify gx source scm\n")
        datf.write("#classification scmversion\n")
        datf.write("#classification scmlastobs\n")
        datf.write("#data gx scmversion scmlastobs value no_heights\n\n")

    crdf = None
    if snapcrdfile:
        neednz2k = not crd_itrf
        crdf = open(snapcrdfile, "w")

    if neednz2k:
        defmodel = DefModel.Model(mdldir)

    if logf:
        logf.write("calc_refstation_coordinates\n\n")
        logf.write("Run time: {0:%Y-%m-%d %H:%M:%S}\n".format(datetime.now()))
        logf.write("Deformation model: {0}\n".format(mdldir))
        logf.write("Deformation model version: {0}\n".format(defmodel.version()))
        logf.write("Station coordinate model directory: {0}\n".format(stndir))
        logf.write("Coordinate comparisons written to: {0}\n".format(chkfile))
        logf.write("GDB update file written to: {0}\n".format(updfile))
        logf.write("Coordinate calculation date: {0:%Y-%m-%d %H:%M:%S}\n".format(calcDate))
        logf.write("GDB coordinates: {0}\n".format(gdbfile))

    if crdf:
        crdf.write(
            "PositioNZ coordinates calculated at {0:%Y-%m-%d %H:%M:%S}\n".format(calcDate)
        )
        if crd_itrf:
            crdf.write("{0}@{1:%Y%m%d}\n".format(model_itrf, calcDate))
        else:
            crdf.write("NZGD2000_{0}\n".format(defmodel.version()))
        crdf.write(
            "options no_geoid ellipsoidal_heights degrees c=scmversion c=scmlastobs\n\n"
        )

    codes = []
    for f in os.listdir(stndir):
        m = re.match(r"^(\w{4})\.xml$", f)
        if not m:
            continue
        codes.append(m.group(1))

    for code in sorted(codes):
        f = code + ".xml"
        gcrds = gdbcrds(code)
        if gcrds is None and not allmarks:
            continue
        print("Processing", code)
        try:
            m = spm(filename=stndir + "/" + f)
            if logf is not None:
                logf.write(
                    "{0} model version: {1:%Y-%m-%d %H:%M:%S}\n".format(
                        code, m.versiondate
                    )
                )
            # Don't want annual and semi-annual components...
            for c in m.components:
                if "annual" in c.componentType():
                    c.setEnabled(False)
            if not alldates:
                if m.startdate is not None and calcDate < m.startdate:
                    continue
                if m.enddate is not None and calcDate > m.enddate:
                    continue
            xyz08 = m.calc(calcDate, enu=False)
            llh08 = GRS80.geodetic(xyz08)
            if neednz2k:
                llh96 = itrf_tfm.transformLonLat(llh08[0], llh08[1], llh08[2], calcDate)
                if llh96[0] < 0:
                    llh96[0] += 360.0
                llhnz2k = defmodel.applyTo(llh96, date=calcDate, subtract=True)
                if llh96[0] > 180:
                    llh96[0] -= 360.0
                if llhnz2k[0] > 180:
                    llhnz2k[0] -= 360.0
            if csv:
                csv.write('"{0}"'.format(code))
                csv.write(',"{0:%Y-%m-%d %H:%M:%S}"'.format(m.versiondate))
                csv.write(',"{0}"'.format(defmodel.version()))
                csv.write(',"{0:%Y-%m-%d %H:%M:%S}"'.format(calcDate))
                csv.write(",{0:.4f},{1:.4f},{2:.4f}".format(*xyz08))
                csv.write(",{0:.9f},{1:.9f},{2:.4f}".format(*llh08))
                if not itrfonly:
                    csv.write(",{0:.9f},{1:.9f},{2:.4f}".format(*llh96))
                    csv.write(",{0:.9f},{1:.9f},{2:.4f}".format(*llhnz2k))
                ucode = code.upper()
                if calcgdb:
                    if gcrds is not None:
                        csv.write(",{0:.9f},{1:.9f},{2:.4f}".format(*gcrds))
                        dedln, dndlt = GRS80.metres_per_degree(*gcrds)
                        edif = (gcrds[0] - llhnz2k[0]) * dedln
                        ndif = (gcrds[1] - llhnz2k[1]) * dndlt
                        hdif = gcrds[2] - llhnz2k[2]
                        csv.write(",{0:.4f},{1:.4f},{2:.4f}".format(edif, ndif, hdif))
                    else:
                        csv.write(",,,,,")
                csv.write("\n")
            if csvu:
                comment = updcomment.format(
                    m.versiondate, defmodel.version(), calcDate
                ).replace('"', '""')
                csvu.write(
                    '"{0}",{2:.9f},{1:.9f},{3:.4f},"B10","NZGD2000","2000.01.01","{4}"\n'.format(
                        code.upper(), llhnz2k[0], llhnz2k[1], llhnz2k[2], comment
                    )
                )
            scmlastobs = "{0:%Y%m%d}".format(m.enddate) if m.enddate is not None else "-"
            scmversion = "SCM_{0:%Y%m%d}".format(m.versiondate)
            if datf:
                datf.write(
                    "{0} {1} {2} {3:.4f} {4:.4f} {5:.4f}\n".format(
                        code.upper(), scmversion, scmlastobs,
                        xyz08[0], xyz08[1], xyz08[2],
                    )
                )
            if crdf:
                snapcrd = llh08 if crd_itrf else llhnz2k
                crdf.write(
                    "{0} {1:.10f} {2:.10f} {3:.4f} {4} {5} {0}\n".format(
                        code.upper(), snapcrd[1], snapcrd[0], snapcrd[2],
                        scmversion, scmlastobs,
                    )
                )
        except:
            print(sys.exc_info()[1])

    for f in (csv, csvu, logf, datf, crdf):
        if f is not None:
            f.close()
def transformation(xyz, date):
    llh = GRS80.geodetic(xyz)
    llh2k = itrf_nzgd2000.transform(llh[0], llh[1], llh[2], date)
    return GRS80.xyz(llh2k)
def main():
    epilog = """
    Available coordinate systems are NZGD2000, ITRFxxxx.  These are geographic
    coordinates.  For geocentric coordinates add _XYZ.  Multiple coordinate
    systems can be entered, eg ITRF2008+NZGD2000

    The epoch can be formatted as YYYY-MM-DD, or now-#### for #### days
    before now.  A range of dates can be selected as date1:date2.  Appended
    to this can be criteria :W# to select a day of the week (0-6, 0=Monday)
    or :M# for the day of the month.

    The output CSV file can contain strings {yyyy} {mm} and {dd} that are
    replaced with the year, month and day.
    """
    parser = argparse.ArgumentParser(
        description="Export cors station coordinates from time series database",
        epilog=epilog,
        parents=[deformationModelArguments()],
    )
    parser.add_argument("timeseries_data", help="Data source for CORS timeseries coordinates")
    parser.add_argument("epoch", help="Epoch or range at which coordinates are required")
    parser.add_argument("output_csv_file", help="File to write coordinates to")
    parser.add_argument(
        "-c", "--coordinate-systems",
        help="Coordinate systems to output (default NZGD2000)",
    )
    parser.add_argument(
        "-i", "--cors-itrf",
        default="ITRF2008",
        help="CORS timeseries ITRF reference frame",
    )
    parser.add_argument(
        "-s", "--stations",
        help="Specifies stations to export, separated by +, or @filename",
    )
    parser.add_argument(
        "-n", "--nz-only",
        action="store_true",
        help="Only list New Zealand stations (including Chathams)",
    )
    parser.add_argument(
        "-t", "--solution-type",
        help="Solution type to extract (required if data includes more than one type)",
    )
    args = parser.parse_args()
    cors_itrf = args.cors_itrf
    tslist = None
    outputfiles = {}
    try:
        tslist = TimeseriesList(args.timeseries_data,
                                solutiontype=args.solution_type,
                                normalize=True)
        try:
            parts = args.epoch.split(":")
            if len(parts) < 2:
                parts.append(parts[0])
            startenddate = []
            for p in parts[:2]:
                match = re.match(r"^now-(\d+)$", p, re.I)
                if match:
                    dtp = dt.date.today() - dt.timedelta(days=int(match.group(1)))
                    startenddate.append(dt.datetime.combine(dtp, dt.time(0, 0, 0)))
                else:
                    startenddate.append(dt.datetime.strptime(p, "%Y-%m-%d"))
            useday = lambda d: True
            if len(parts) > 2:
                match = re.match(r"^([MW])(\d\d?)$", parts[2].upper())
                if not match:
                    raise RuntimeError("Invalid epoch date selector " + parts[2])
                dayno = int(match.group(2))
                if match.group(1) == "M":
                    useday = lambda d: d.day == dayno
                else:
                    useday = lambda d: d.weekday() == dayno
            startdate, enddate = startenddate
            increment = dt.timedelta(days=1)
            while startdate <= enddate:
                calcdate = startdate
                startdate = startdate + increment
                if not useday(calcdate):
                    continue
                filename = args.output_csv_file
                filename = filename.replace("{yyyy}", "{0:04d}".format(calcdate.year))
                filename = filename.replace("{mm}", "{0:02d}".format(calcdate.month))
                filename = filename.replace("{dd}", "{0:02d}".format(calcdate.day))
                filename = filename.replace(
                    "{ddd}", "{0:03d}".format(calcdate.timetuple().tm_yday))
                if filename not in outputfiles:
                    outputfiles[filename] = []
                outputfiles[filename].append(calcdate)
        except:
            raise RuntimeError("Invalid calculation epoch - must be YYYY-MM-DD: " + args.epoch)
        if len(outputfiles) == 0:
            raise RuntimeError("No dates defined for station coordinates")

        itrf_nzgd2000 = None
        nzgd2000_version = ""
        conversions = []
        geodetic_suffix = "_lon _lat _ehgt".split()
        xyz_suffix = "_X _Y _Z".split()
        coord_cols = []
        for cs in (args.coordinate_systems or "NZGD2000").upper().split("+"):
            match = re.match(r"^(NZGD2000|ITRF(?:19|20)\d\d)(_XYZ)?$", cs)
            if not match:
                print("Invalid coordinate system requested:", cs)
                sys.exit()
            csbase = match.group(1)
            isgeodetic = match.group(2) != "_XYZ"
            transformation = None
            if csbase == "NZGD2000":
                if not itrf_nzgd2000:
                    defmodel = loadDeformationModel(args)
                    nzgd2000_version = defmodel.version()
                    itrf_nzgd2000 = ITRF_NZGD2000.Transformation(
                        itrf=cors_itrf, model=defmodel)

                def transformation(xyz, date):
                    llh = GRS80.geodetic(xyz)
                    llh2k = itrf_nzgd2000.transform(llh[0], llh[1], llh[2], date)
                    return GRS80.xyz(llh2k)

            else:
                transformation = ITRF.Transformation(
                    from_itrf=cors_itrf, to_itrf=csbase).transform
            conversions.append((transformation, isgeodetic))
            coord_cols.extend(
                csbase + suffix
                for suffix in (geodetic_suffix if isgeodetic else xyz_suffix)
            )

        check_code = lambda code: True
        if args.stations:
            stations = []
            for s in args.stations.split("+"):
                if s.startswith("@"):
                    try:
                        with open(s[1:]) as sf:
                            stations.extend(sf.read().upper().split())
                    except:
                        raise RuntimeError("Cannot open station list file " + s[1:])
                elif s != "":
                    stations.append(s.upper())
            if len(stations) == 0:
                raise RuntimeError("No stations specified in " + args.stations)
            check_code = lambda code: code.upper() in stations

        check_xyz = lambda xyz: True
        if args.nz_only:
            nzxyz = GRS80.xyz(177.0, -42.0)
            nzdist = 1000000.0
            check_xyz = lambda xyz: (math.sqrt(
                (xyz[0] - nzxyz[0]) ** 2
                + (xyz[1] - nzxyz[1]) ** 2
                + (xyz[2] - nzxyz[2]) ** 2) < nzdist)

        for output_csv_file in sorted(outputfiles):
            calcdates = outputfiles[output_csv_file]
            ncoord = 0
            buildfile = output_csv_file + ".temp"
            with open(buildfile, "w") as csvf:
                writer = csv.writer(csvf)
                header = "code epoch".split()
                if nzgd2000_version:
                    header.append("nzgd2000_version")
                header.extend(coord_cols)
                writer.writerow(header)
                for code in tslist.codes():
                    if not check_code(code):
                        continue
                    for calcdate in calcdates:
                        ts = tslist.get(code).getData(enu=False)
                        try:
                            crd = ts.loc[calcdate]
                        except KeyError:
                            continue
                        if type(crd) is pd.DataFrame:
                            print("Ambiguous coordinates {0} at date {1}".format(code, calcdate))
                            crd = crd.iloc[-1]
                        xyz_2008 = [crd.x, crd.y, crd.z]
                        if not check_xyz(xyz_2008):
                            continue
                        row = [code, calcdate.strftime("%Y-%m-%d")]
                        if nzgd2000_version:
                            row.append(nzgd2000_version)
                        for transformation, isgeodetic in conversions:
                            try:
                                xyz = transformation(xyz_2008, calcdate)
                                if isgeodetic:
                                    llh = GRS80.geodetic(xyz)
                                    row.extend([
                                        "{0:.9f}".format(llh[0]),
                                        "{0:.9f}".format(llh[1]),
                                        "{0:.4f}".format(llh[2]),
                                    ])
                                else:
                                    row.extend([
                                        "{0:.4f}".format(xyz[0]),
                                        "{0:.4f}".format(xyz[1]),
                                        "{0:.4f}".format(xyz[2]),
                                    ])
                            except OutOfRangeError:
                                row.extend(["", "", ""])
                        writer.writerow(row)
                        ncoord += 1
            if ncoord == 0:
                os.unlink(buildfile)
                print(output_csv_file + " not built as coordinates not available")
            else:
                os.rename(buildfile, output_csv_file)
    except RuntimeError as e:
        print(e)
def xyzOffset(self):
    xyz1 = GRS80.xyz(self.lon, self.lat, self.hgt)
    dxyz = xyz1 - self.xyz0
    return self.enu_axes.dot(dxyz)
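# Example: setXyzOffset() and xyzOffset() are inverses up to the precision of
# the geodetic round trip, since enu_axes is orthonormal (enu_axes.dot()
# undoes the enu_axes.T.dot() applied above).  A sketch; the class name
# "GdbMark" and the code "ABCD" are assumptions for illustration.
import numpy as np

mark = GdbMark("ABCD")  # hypothetical class name and mark code
mark.setXyzOffset([0.010, 0.005, -0.002], comment="test offset")
print(np.round(mark.xyzOffset(), 6))  # ~[0.01, 0.005, -0.002]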