def polar2cartesian(r, theta, ang='deg'):
    """
    Coordinates conversion: polar => cartesian

    Parameters
    ----------
    r , theta : float or iterable of floats
        polar coordinates
    ang : string
        'deg' (degrees) or 'rad' (radians)

    Returns
    -------
    x , y : numpy.array of float
        cartesian coordinates
    """
    if utils.is_iterable(r):
        r = np.array(r)
    if utils.is_iterable(theta):
        theta = np.array(theta)

    if ang == 'deg':
        theta = np.deg2rad(theta)

    x = r * np.cos(theta)
    y = r * np.sin(theta)
    return x, y
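# Usage sketch (illustrative values only; assumes numpy is imported as np and this
# module's functions are in the namespace):
# >>> x, y = polar2cartesian(1.0, 45.0, ang='deg')        # single point
# >>> X, Y = polar2cartesian([1.0, 2.0], [90.0, 180.0])   # iterables -> numpy arrays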
def linear_coef_a_b(x1, y1, x2, y2):
    """
    Gives the coefficients of the line passing through the two points
    (x1,y1) & (x2,y2)

    x1,y1,x2,y2 can be iterables

    Parameters
    ----------
    x1,y1,x2,y2 : float or list or numpy.array
        Coordinates of the 1st and the 2nd point

    Returns
    -------
    a : float
        regression coefficient (slope)
    b1 & b2 : float
        regression offset coefficients (b1 must be equal to b2)
    """
    if utils.is_iterable(x1):
        x1 = np.array(x1, dtype=np.float64)
        x2 = np.array(x2, dtype=np.float64)
        y1 = np.array(y1, dtype=np.float64)
        y2 = np.array(y2, dtype=np.float64)
    else:
        x1 = float(x1)
        x2 = float(x2)
        y1 = float(y1)
        y2 = float(y2)

    a = (y2 - y1) / (x2 - x1)
    b1 = y1 - a * x1
    b2 = y2 - a * x2
    return a, b1, b2
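# Usage sketch (illustrative values only): slope and offsets of the line through
# (0,1) and (2,5).
# >>> a, b1, b2 = linear_coef_a_b(0., 1., 2., 5.)   # a == 2.0 and b1 == b2 == 1.0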
def read_rinex_met(metfile):
    """
    Read RINEX Meteorological file(s) and convert them to a Pandas DataFrame

    Parameters
    ----------
    metfile : str or list of str
        Path(s) of the RINEX Meteorological file(s) (e.g. made with glob)

    Returns
    -------
    DF : DataFrame
        Meteorological data in a DataFrame

    Notes
    -----
    Written by Chaiyaporn Kitpracha
    """
    if utils.is_iterable(metfile):
        merge_df = pd.DataFrame()
        for metfile_m in metfile:
            met_df = read_rinex_met_2(str(metfile_m))
            merge_df = pd.concat([merge_df, met_df])
        return merge_df
    else:
        met_df = read_rinex_met_2(metfile)
        return met_df
def interp_get(self, T):
    if self.bool_interp_uptodate == False:
        print("WARN : interpolator obsolete, automatic recomputation")
        self.interp_set()

    tsout = copy.copy(self)
    tsout.del_data()

    if not utils.is_iterable(T):
        T = np.array([T])

    if self.typeobs == 'NULL':
        print("ERR : no typeobs defined (NULL)")
        return 0

    if self.typeobs == 'RPY':
        A = self.RfT(T)
        B = self.PfT(T)
        C = self.YfT(T)

    for i in range(len(T)):
        tsout.add_obs(Attitude(A[i], B[i], C[i], T=T[i]))

    return tsout
def rinex_read_epoch(input_rinex_path_or_string, interval_out=False,
                     add_tzinfo=False, out_array=True):
    """
    input_rinex_path_or_string : can be the path of a RINEX file
    or directly the RINEX content as a string

    161019 : quick and dirty copy/paste of rinex_start_end
    """
    epochs_list = []
    rinex_60sec = False

    if utils.is_iterable(input_rinex_path_or_string):
        Finp = input_rinex_path_or_string
    elif os.path.isfile(input_rinex_path_or_string):
        Finp = open(input_rinex_path_or_string)
    else:
        Finp = input_rinex_path_or_string

    for line in Finp:
        epoch_rnx2 = re.search('^ {1,2}([0-9]{1,2} * ){5}', line)
        if epoch_rnx2:
            fraw = line.split()  # fraw is there to handle the fractions of second
            fraw = fraw[0:6]
            fraw = [float(e) for e in fraw]
            f = [int(e) for e in fraw]
            msec = (fraw[5] - np.floor(fraw[5]))
            msec = np.round(msec, 4)
            msec = int(msec * 10**6)
            f.append(msec)  # append the fraction of second
            if f[0] < 50:
                f[0] = f[0] + 2000
            else:
                f[0] = f[0] + 1900
            if f[5] == 60:  # specific case found in RINEXs with T = 60sec
                rinex_60sec = True
                f[5] = 59
            else:
                rinex_60sec = False
            try:
                epochdt = dt.datetime(*f)
                if rinex_60sec:
                    epochdt = epochdt + dt.timedelta(seconds=1)
                epochs_list.append(epochdt)
            except Exception:
                print("ERR : rinex_read_epoch : ", f)

    if add_tzinfo:
        epochs_list = [e.replace(tzinfo=dateutil.tz.tzutc()) for e in epochs_list]

    if out_array:
        return np.array(epochs_list)
    else:
        return epochs_list
def interp_get(self, T, coortype='ENU'):
    if self.bool_interp_uptodate == False:
        print("WARN : interpolator obsolete, automatic recomputation")
        self.interp_set()

    tsout = copy.copy(self)
    tsout.del_data()

    if not utils.is_iterable(T):
        T = np.array([T])

    if coortype == 'ENU':
        A = self.EfT(T)
        B = self.NfT(T)
        C = self.UfT(T)
    if coortype == 'XYZ':
        A = self.XfT(T)
        B = self.YfT(T)
        C = self.ZfT(T)
    if coortype == 'FLH':
        A = self.FfT(T)
        B = self.LfT(T)
        C = self.HfT(T)

    for i in range(len(T)):
        tsout.add_point(Point(A[i], B[i], C[i], T=T[i], initype=coortype))

    return tsout
def C_z(theta):
    """
    Gives the rotation matrix around the Z-axis

    [C,-S,0]
    [S, C,0]
    [0, 0,1]

    Source
    ------
    https://fr.wikipedia.org/wiki/Matrice_de_rotation#En_dimension_trois
    """
    if not utils.is_iterable(theta):
        theta = np.array([theta])

    C = np.cos(theta)
    S = np.sin(theta)
    Z = np.zeros(len(theta))
    I = np.ones(len(theta))

    C_z = np.stack([[C, -S, Z],
                    [S,  C, Z],
                    [Z,  Z, I]])
    C_z = np.squeeze(C_z)
    return C_z
def C_y(theta):
    """
    Gives the rotation matrix around the Y-axis

    Manage iterable (list, array) as input

    [ C,0,S]
    [ 0,1,0]
    [-S,0,C]

    Source
    ------
    https://fr.wikipedia.org/wiki/Matrice_de_rotation#En_dimension_trois
    """
    if not utils.is_iterable(theta):
        theta = np.array([theta])

    C = np.cos(theta)
    S = np.sin(theta)
    Z = np.zeros(len(theta))
    I = np.ones(len(theta))

    C_y = np.stack([[ C, Z, S],
                    [ Z, I, Z],
                    [-S, Z, C]])
    C_y = np.squeeze(C_y)
    return C_y
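# Usage sketch (illustrative angle; assumes numpy is imported as np): a single 3x3
# rotation matrix for a scalar angle, and stacked matrices for an iterable of angles.
# >>> Rz = C_z(np.pi / 4)            # shape (3, 3)
# >>> Rz_multi = C_z([0., np.pi])    # one matrix per angle, stacked along the last axis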
def ENU2XYZ(E, N, U, Xref, Yref, Zref):
    """
    Coordinates conversion

    ENU Topocentric => XYZ ECEF Geocentric

    Parameters
    ----------
    E,N,U : numpy.array of floats
        East North Up components (m) w.r.t. Xref,Yref,Zref

    Xref,Yref,Zref : floats
        coordinates of the topocentric origin point in the geocentric frame

    Returns
    -------
    X,Y,Z : numpy.array of floats
        cartesian coordinates X,Y,Z in meters

    Source
    ------
    https://gssc.esa.int/navipedia/index.php/Transformations_between_ECEF_and_ENU_coordinates
    """
    if utils.is_iterable(E):
        Xlist, Ylist, Zlist = [], [], []
        for e, n, u in zip(E, N, U):
            x, y, z = ENU2XYZ(e, n, u, Xref, Yref, Zref)
            Xlist.append(x)
            Ylist.append(y)
            Zlist.append(z)
        return np.array(Xlist), np.array(Ylist), np.array(Zlist)
    else:
        fr, lr, hr = XYZ2GEO(Xref, Yref, Zref)
        f0 = np.deg2rad(fr)
        l0 = np.deg2rad(lr)

        R = np.array([[-np.sin(l0), np.cos(l0), 0],
                      [-np.sin(f0) * np.cos(l0), -np.sin(f0) * np.sin(l0), np.cos(f0)],
                      [np.cos(f0) * np.cos(l0), np.cos(f0) * np.sin(l0), np.sin(f0)]])

        R3 = R.T
        R3 = scipy.linalg.inv(R)  # R is orthogonal, so the transpose above gives the same result

        ENU = np.vstack((E, N, U))
        xyz = np.dot(R3, ENU)  # + np.vstack((Xref,Yref,Zref))

        X = float(xyz[0]) + Xref
        Y = float(xyz[1]) + Yref
        Z = float(xyz[2]) + Zref

        return X, Y, Z
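# Usage sketch (purely illustrative coordinates; the reference point below is an
# arbitrary point on the Earth's surface, not a real station):
# >>> X, Y, Z = ENU2XYZ(10., 5., 2., 4201577., 189860., 4779065.)
# >>> # iterables of E,N,U are also accepted and return numpy arrays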
def midas_vel_files_2_pandas_DF(vel_files_in):
    """
    Convert MIDAS Velocity files to a Pandas DataFrame

    Parameters
    ----------
    vel_files_in : str or list of str
        if list of str, will consider directly the files inside the list
        if str, can be the path of a single file, or a generic (wildcard)
        path to consider several files

    Returns
    -------
    DF : Pandas DataFrame
    """
    if utils.is_iterable(vel_files_in):
        L_vel_files = vel_files_in
    else:
        L_vel_files = glob.glob(vel_files_in)

    if len(L_vel_files) > 1:
        work_dir = os.path.dirname(L_vel_files[0])
        vel_file_opera = utils.cat(work_dir + "/MERGED_VEL", *L_vel_files)
    else:
        vel_file_opera = L_vel_files[0]

    # header=None: the files carry no header line (header=-1 is no longer accepted by pandas)
    DF = pd.read_table(vel_file_opera, comment='*', header=None,
                       delim_whitespace=True)

    DF.columns = ["Station", "soft", "epoch_first", "epoch_last", "duration",
                  "nb_epoch_all", "nb_epoch_good", "nb_pairs",
                  "V_East", "V_North", "V_Up",
                  "sV_East", "sV_North", "sV_Up",
                  "offset_e_1st_epoch", "offset_n_1st_epoch", "offset_u_1st_epoch",
                  "outlier_ratio_e", "outlier_ratio_n", "outlier_ratio_u",
                  "std_velo_pair_e", "std_velo_pair_n", "std_velo_pair_u",
                  "nb_steps"]

    return DF
def insert_lines_in_file(file_path, text_values, lines_ids):
    if not utils.is_iterable(text_values):
        text_values = [text_values]
    if not utils.is_iterable(lines_ids):
        lines_ids = [lines_ids]

    f = open(file_path, "r")
    contents = f.readlines()
    f.close()

    for txt, lin in zip(text_values, lines_ids):
        contents.insert(lin, txt)

    f = open(file_path, "w")
    contents = "".join(contents)
    f.write(contents)
    f.close()

    return file_path
def calc_stand_ties(epoc, lat_ref, h_ref, h_rov, p0, t0, e0, unit="mm"):
    """
    Determine standard atmospheric ties with the analytical equation from
    Teke et al. (2011)

    Parameters
    ----------
    epoc : time in Python datetime
    lat_ref : Latitude of Ref. station
    h_ref : Height of Ref. station
    h_rov : Height of Rov. station
    p0 : Pressure of Ref. station in hPa
    t0 : Temperature in C of Ref. station
    e0 : Water vapor pressure of Ref. station in hPa
    unit : in meters (m) or millimeters (mm)

    Returns
    -------
    ties : Standard ties of total delay in millimeters or meters
    """
    if utils.is_iterable(lat_ref):
        ties = []
        for epoc_m, lat_ref_m, h_ref_m, h_rov_m, p0_m, t0_m, e0_m in zip(
                epoc, lat_ref, h_ref, h_rov, p0, t0, e0):
            ties_m = calc_stand_ties(epoc_m, lat_ref_m, h_ref_m, h_rov_m,
                                     p0_m, t0_m, e0_m)
            ties.append(ties_m)
        return np.array(ties)
    else:
        t0 = t0 + 273.15  # convert to Kelvin
        p = p0 * ((1 - (0.0065 * (h_rov - h_ref) / t0))**(9.80665 / (0.0065 * 287.058)))
        dZHD = (0.0022768 * (p0 - p)) / (1 - 0.00266 * np.cos(2 * lat_ref) - 0.28e-6 * h_ref)
        dZWD = 2.789 * e0 / (t0**2) * ((5383 / t0) - 0.7803) * 0.0065 * (h_rov - h_ref)

        if unit == "m":
            return np.round(dZHD + dZWD, 4)
        elif unit == "mm":
            return np.round((dZHD + dZWD) * 1000, 4)
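# Usage sketch (illustrative meteorological values only; the latitude is passed in
# radians here, an assumption based on the cosine term in the formula above):
# >>> tie = calc_stand_ties(dt.datetime(2020, 1, 1), np.deg2rad(45.),
# ...                       100., 150., 1013.25, 20., 10., unit="mm")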
def fuv_calc(V, A, P=1, normafuv=1):
    """
    Args :
        V : residuals vector

        A : Jacobian matrix

        P : weight matrix

        Can manage both standard arrays and sparse arrays

    Returns :
        fuv : Facteur unitaire de variance (unitary variance factor)

    Notes :
        the fuv depends on the weight matrix, but the sigmas do not

        e.g. with weights of 10**-6
        fuv : 439828.260843
        sigma : [ 5.21009306  5.09591568  0.04098106]

        with weights of 1
        fuv : 4.39828260843e-07
        sigma : [ 5.21009306  5.09591568  0.04098106]
    """
    V = np.squeeze(np.array(V))

    if not utils.is_iterable(P):
        P = np.ones(len(V)) * P
    elif scipy.sparse.issparse(P):
        P = P.diagonal()
    else:
        print("DEPRECATION : modification done in fuv_calc, P should be given as Matrix-shaped now")
        P = P.diagonal()

    P = P * (1 / normafuv)

    VPV = np.column_stack((V, P, V))
    numera = np.sum(np.prod(VPV, 1))

    # numera is just an adaptation of VT * P * V
    # EDIT 1806 : fine ... but it is a poor adaptation!
    # one must then make sure that the diagonal of P has indeed been extracted

    if A.ndim == 1:
        A = np.expand_dims(A, 0)

    fuv = numera / np.abs(A.shape[0] - A.shape[1])
    return fuv
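# Usage sketch (illustrative arrays): unit weights on a small residual vector and a
# two-parameter Jacobian.
# >>> V = np.array([0.01, -0.02, 0.015])
# >>> A = np.ones((3, 2))
# >>> fuv = fuv_calc(V, A, P=1)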
def savage_buford_formula(Vs, X, d):
    """
    X : distance to the fault; an iterable for the whole profile,
        or a single number for the maximum length
    d : locking depth of the fault

    returns X and Vdeform(X)

    X and d must be in the same unit, Vs not necessarily
    """
    if not utils.is_iterable(X):
        X = np.arange(-X, X, 1)
    return X, (Vs / np.pi) * np.arctan2(X, d)
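# Usage sketch (illustrative values): interseismic velocity profile for a slip rate
# Vs across a fault locked down to depth d (X and d in the same unit).
# >>> X, Vdef = savage_buford_formula(20., 50., 15.)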
def read_bull_B(file_path_in):
    """
    Read a Bulletin B file

    Parameters
    ----------
    file_path_in : str or list of str
        Path(s) of the file(s) on the local machine.

    Returns
    -------
    DFout : pandas DataFrame
        Returns a pandas DataFrame with the data extracted from the file.
    """
    if not utils.is_iterable(file_path_in):
        file_path_in = [file_path_in]

    file_path_in = sorted(file_path_in)

    DFstk = []
    for path_solo in file_path_in:
        S = utils.extract_text_between_elements(path_solo,
                                                "1 - DAILY FINAL VALUES",
                                                "2 - DAILY FINAL VALUES")
        L = S.replace('\t', '\n').split("\n")

        L2 = []
        for e in L:
            if len(e) > 0:
                if e[0] != " ":
                    L2.append(e)

        L3 = []
        for e in L2:
            L4 = []
            for ee in e.split():
                L4.append(float(ee))
            L3.append(L4)

        DF = pd.DataFrame(np.vstack(L3))
        DFstk.append(DF)

    DFout = pd.concat(DFstk)
    DFout.columns = ["year", "month", "day", "MJD", "x", "y", "UT1-UTC",
                     "dX", "dY", "x err", "y err", "UT1 err", "X err", "Y err"]

    return DFout
def figure_saver(figobjt_in, outdir, outname, outtype='.png', formt='a4'):
    if not utils.is_iterable(outtype):
        outtype = (outtype,)

    outpath_stk = []
    for outtype_iter in outtype:
        outpath = os.path.join(outdir, outname + outtype_iter)
        # if formt == 'a4':
        # elif
        figobjt_in.savefig(outpath)
        outpath_stk.append(outpath)

    if len(outpath_stk) == 1:
        outpath_stk = outpath_stk[0]

    return outpath_stk
def cartesian2polar(x, y):
    """
    Coordinates conversion: cartesian => polar

    Parameters
    ----------
    x , y : numpy.array of float
        cartesian coordinates

    Returns
    -------
    r , theta : float
        polar coordinates (radius / angle in radians)
    """
    if utils.is_iterable(x):
        x = np.array(x)
    if utils.is_iterable(y):
        y = np.array(y)

    theta = np.arctan2(y, x)
    r = np.sqrt(x**2 + y**2)
    return r, theta
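# Usage sketch (illustrative values): round trip with polar2cartesian above.
# >>> r, theta = cartesian2polar(1., 1.)
# >>> x, y = polar2cartesian(r, theta, ang='rad')   # back to (1., 1.)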
def interp_get(self, T, coortype='ENU'):
    """
    Method to get the coordinates interpolated at given epochs

    Parameters
    ----------
    T : float or list of float
        Time (IN POSIX Time) where the interpolation is wished.
    coortype : str, optional
        The coordinates type. The default is 'ENU'.

    Returns
    -------
    tsout :
        a copy of the time series containing the points interpolated
        at the epochs T.
    """
    if self.bool_interp_uptodate == False:
        print("WARN : interpolator obsolete, automatic recomputation")
        self.interp_set()

    tsout = copy.copy(self)
    tsout.del_data()

    if not utils.is_iterable(T):
        T = np.array([T])

    if coortype == 'ENU':
        A = self.EfT(T)
        B = self.NfT(T)
        C = self.UfT(T)
    if coortype == 'XYZ':
        A = self.XfT(T)
        B = self.YfT(T)
        C = self.ZfT(T)
    if coortype == 'FLH':
        A = self.FfT(T)
        B = self.LfT(T)
        C = self.HfT(T)

    for i in range(len(T)):
        tsout.add_point(Point(A[i], B[i], C[i], T=T[i], initype=coortype))

    return tsout
def eop_interpotate(DF_EOP, Epochs_intrp, eop_params=["x", "y"]):
    """
    Interpolate the EOP provided in a C04-like DataFrame

    Parameters
    ----------
    DF_EOP : DataFrame
        Input EOP DataFrame (C04 format).
        Can be generated by files_rw.read_eop_C04
    Epochs_intrp : datetime or list of datetimes
        Wished epochs for the interpolation.
    eop_params : list of str, optional
        Wished EOP parameters to be interpolated.
        The default is ["x","y"].

    Returns
    -------
    OUT : DataFrame or Series
        Interpolated parameters.
        Series if only one epoch is provided, DataFrame otherwise
    """
    if not utils.is_iterable(Epochs_intrp):
        singleton = True
    else:
        singleton = False

    I_eop = dict()
    Out_eop = dict()
    Out_eop["epoch"] = Epochs_intrp

    for eoppar in eop_params:
        I = conv.interp1d_time(DF_EOP.epoch, DF_EOP[eoppar])
        I_eop[eoppar] = I
        try:
            Out_eop[eoppar] = I(Epochs_intrp)
        except ValueError as err:
            print("ERR: in EOP interpolation")
            print("param.:", eoppar, "epoch:", Epochs_intrp)
            raise err

    if not singleton:
        OUT = pd.DataFrame(Out_eop)
    else:
        OUT = pd.Series(Out_eop)

    return OUT
def to_dataframe(self, coortype='XYZ'):
    """
    Export the TimeSerie Object as DataFrame

    Parameters
    ----------
    coortype : str or iterable of str.
        The coordinates type exported to the DataFrame.
        'XYZ', 'FLH', 'ENU', 'NED'
        can be also an iterable like ('XYZ','FLH')
        The default is 'XYZ'.

    Returns
    -------
    DF : DataFrame
        output DataFrame.
    """
    if not utils.is_iterable(coortype):
        coortype = (coortype, )

    ColStk = tuple()
    ColNameStk = []

    for icoty, coty in enumerate(coortype):
        A, B, C, T, sA, sB, sC = self.to_list(coty)
        if icoty == 0:
            Tdt = conv.posix2dt(T)
            ColStk = ColStk + (Tdt, T, A, B, C, sA, sB, sC)
            ColNameStk = ["Tdt", "T"] + [e for e in coty] + ["s" + e for e in coty]
        else:
            ColStk = ColStk + (A, B, C, sA, sB, sC)
            ColNameStk = ColNameStk + [e for e in coty] + ["s" + e for e in coty]

    BIG = np.column_stack(ColStk)
    DF = pd.DataFrame(BIG)
    DF.columns = ColNameStk
    DF = DF.infer_objects()

    return DF
def PWV_conversion(zwd, Tm):
    """
    Convert Zenith Wet Delay to Precipitable Water Vapor (PWV)

    Parameters
    ----------
    zwd : Zenith wet delay in meters
    Tm : Mean temperature of the troposphere

    Returns
    -------
    PWV : Precipitable Water Vapor in mm

    Sources
    -------
    Solution and constants k2', k3 from Atmospheric effects in Space Geodesy,
    Chapter 3.
    """
    if utils.is_iterable(zwd):
        PWV = []
        for zwd_m, Tm_m in zip(zwd, Tm):
            pwv = PWV_conversion(zwd_m, Tm_m)
            PWV.append(pwv)
        return np.array(PWV)
    else:
        k1 = 77.689
        k2 = 71.2952
        Md = 28.965 / 100  # g/mol -> kg/mol
        Mw = 18.016 / 100  # g/mol -> kg/mol
        k2d = k2 - (k1 * (Mw / Md))
        k3 = 375463
        R = 8.3143
        Rhow = 999.97
        CF = 10e6 * Mw / (Rhow * R * (k2d + (k3 / Tm)))
        PWV = CF * zwd * 1000
        return np.round(PWV, 2)
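# Usage sketch (illustrative values): a 0.2 m zenith wet delay with a 270 K mean
# troposphere temperature.
# >>> pwv = PWV_conversion(0.2, 270.)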
def figure_saver(figobjt_in, outdir, outname,
                 outtype=('.png', '.pdf', '.figpik'),
                 formt=None, dpi=200, transparent=False):
    if not utils.is_iterable(outtype):
        outtype = (outtype, )

    outpath_stk = []
    for outtype_iter in outtype:
        if "pik" in outtype_iter:
            outpath = utils.pickle_saver(figobjt_in, outdir, outname, outtype_iter)
        else:
            outpath = os.path.join(outdir, outname + outtype_iter)
            if formt:
                if type(formt) is tuple:
                    formtup = formt
                elif type(formt) is str:
                    if formt.upper() == "A4":
                        formtup = (11.69, 8.27)
                    elif formt.upper() == "A3":
                        formtup = (16.53, 11.69)
                    else:
                        print("WARN: unrecognized format string, assuming A4 figure format")
                        formtup = (11.69, 8.27)
                figobjt_in.set_size_inches(*formtup)
            figobjt_in.savefig(outpath, transparent=transparent, dpi=dpi)
        outpath_stk.append(outpath)

    if len(outpath_stk) == 1:
        outpath_stk = outpath_stk[0]

    return outpath_stk
def figure_saver(figobjt_in, outdir, outname,
                 outtype=('.png', '.pdf', '.figpik'),
                 formt='a4', transparent=False):
    if not utils.is_iterable(outtype):
        outtype = (outtype,)

    outpath_stk = []
    for outtype_iter in outtype:
        if "pik" in outtype_iter:
            outpath = utils.pickle_saver(figobjt_in, outdir, outname, outtype_iter)
        else:
            outpath = os.path.join(outdir, outname + outtype_iter)
            # figobjt_in.savefig(outpath)
            figobjt_in.savefig(outpath, transparent=transparent)
        outpath_stk.append(outpath)

    if len(outpath_stk) == 1:
        outpath_stk = outpath_stk[0]

    return outpath_stk
def kwargs_for_jacobian(kwdic_generik, kwdic_variables):
    """
    Build a list of kwargs for the jacobian function

    kwdic_generik : parameters which will not change
    kwdic_variables : parameters which will change, so each one must be
                      associated with an iterable
    """
    keys_list = list(kwdic_variables.keys())

    for k, v in kwdic_variables.items():
        if not utils.is_iterable(v):
            print('WARN : key', k, 'val', v, 'is not iterable !!!')

    values_combined = itertools.product(*list(kwdic_variables.values()))

    kwdic_list_out = []
    for values in values_combined:
        kwdic_out = dict(kwdic_generik)
        for k, v in zip(keys_list, values):
            kwdic_out[k] = v
        kwdic_list_out.append(kwdic_out)

    return kwdic_list_out
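# Usage sketch (hypothetical parameter names, for illustration only): fixed kwargs
# combined with every combination of the variable kwargs.
# >>> kw_fix = {"x1": 0., "y1": 1.}
# >>> kw_var = {"x2": [1., 2.], "y2": [3., 4.]}
# >>> kw_list = kwargs_for_jacobian(kw_fix, kw_var)   # 4 combined kwargs dicts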
def compar_sinex(snx1, snx2, stat_select=None, invert_select=False,
                 out_means_summary=True, out_meta=True, out_dataframe=True,
                 manu_wwwwd=None):

    if type(snx1) is str:
        week1 = utils.split_improved(os.path.basename(snx1), "_", ".")[:]
        week2 = utils.split_improved(os.path.basename(snx2), "_", ".")[:]
        if week1 != week2:
            print("WARN : Dates of the 2 input files are different !!! It might be very bad !!!",
                  week1, week2)
        else:
            wwwwd = week1
        D1 = files_rw.read_sinex(snx1, True)
        D2 = files_rw.read_sinex(snx2, True)
    else:
        print("WARN : you are giving the SINEX input as a DataFrame, wwwwd has to be given manually using manu_wwwwd")
        D1 = snx1
        D2 = snx2

    if manu_wwwwd:
        wwwwd = manu_wwwwd

    STATCommon = set(D1["STAT"]).intersection(set(D2["STAT"]))

    if stat_select:
        STATCommon_init = list(STATCommon)
        if invert_select:
            select_fct = lambda x: not x
        else:
            select_fct = lambda x: x

        if type(stat_select) is str:
            STATCommon = [sta for sta in STATCommon_init
                          if select_fct(re.search(stat_select, sta))]
        elif utils.is_iterable(stat_select):
            STATCommon = [sta for sta in STATCommon_init
                          if select_fct(sta in stat_select)]
        else:
            print("WARN : check type of stat_select")

    D1Common = D1[D1["STAT"].isin(STATCommon)].sort_values("STAT").reset_index(drop=True)
    D2Common = D2[D2["STAT"].isin(STATCommon)].sort_values("STAT").reset_index(drop=True)

    Ddiff = pd.DataFrame()
    Ddiff = Ddiff.assign(STAT=D1Common["STAT"])

    #### XYZ Part
    for xyz in ("x", "y", "z"):
        dif = pd.to_numeric((D2Common[xyz] - D1Common[xyz]))
        Ddiff = Ddiff.assign(xyz=dif)
        Ddiff = Ddiff.rename(columns={"xyz": xyz})

    D3D = np.sqrt((Ddiff["x"]**2 + Ddiff["y"]**2 + Ddiff["z"]**2).astype('float64'))
    Ddiff = Ddiff.assign(d3D_xyz=D3D)

    ### ENU Part
    E, N, U = [], [], []
    enu_stk = []

    for (_, l1), (_, l2) in zip(D1Common.iterrows(), D2Common.iterrows()):
        enu = conv.XYZ2ENU_2(l1["x"], l1["y"], l1["z"], l2["x"], l2["y"], l2["z"])
        enu_stk.append(np.array(enu))

    if len(enu_stk) == 0:
        E, N, U = np.array([]), np.array([]), np.array([])
    else:
        ENU = np.hstack(enu_stk)
        E, N, U = ENU[0, :], ENU[1, :], ENU[2, :]

    D2D = np.sqrt((E**2 + N**2).astype('float64'))
    D3D = np.sqrt((E**2 + N**2 + U**2).astype('float64'))

    Ddiff = Ddiff.assign(e=E)
    Ddiff = Ddiff.assign(n=N)
    Ddiff = Ddiff.assign(u=U)
    Ddiff = Ddiff.assign(d2D_enu=D2D)
    Ddiff = Ddiff.assign(d3D_enu=D3D)

    if out_dataframe:
        out_meta = True

    if not out_means_summary:
        print("INFO : this is not used operationally and it can be improved")
        return Ddiff
    else:
        output = []

        col_names = ("x", "y", "z", "d3D_xyz",
                     "e", "n", "u", "d2D_enu", "d3D_enu")

        for xyz in col_names:
            output.append(stats.rms_mean(Ddiff[xyz]))
        for xyz in col_names:
            output.append(np.nanmean(Ddiff[xyz]))
        for xyz in col_names:
            output.append(np.nanstd(Ddiff[xyz]))

        if out_meta:
            print(wwwwd)
            nstat = len(STATCommon)
            week = int(wwwwd[:4])
            day = int(wwwwd[4:])
            output = [week, day, nstat] + output

        if not out_dataframe:
            return tuple(output)
        else:
            output_DF = pd.DataFrame(output).transpose()
            output_DF.columns = ["week", "dow", "nbstat",
                                 "x_rms", "y_rms", "z_rms", "d3D_xyz_rms",
                                 "e_rms", "n_rms", "u_rms", "d2D_enu_rms", "d3D_enu_rms",
                                 "x_ari", "y_ari", "z_ari", "d3D_xyz_ari",
                                 "e_ari", "n_ari", "u_ari", "d2D_enu_ari", "d3D_enu_ari",
                                 "x_std", "y_std", "z_std", "d3D_xyz_std",
                                 "e_std", "n_std", "u_std", "d2D_enu_std", "d3D_enu_std"]
            return output_DF
def partial_derive_old(f, var_in, var_out=0, kwargs_f={}, args_f=[], h=0):
    '''
    var_in : derivation with respect to this variable
             can be an int (starts with 0) or a string describing the name
             of the var in f
    var_out : the output of f which needs to be considered as the output
              ** must be an int **
    args_f & kwargs_f : tuple/list & dict describing the arguments of f
    h : derivation step, if h == 0 use x * sqrt(epsilon)
        (source : http://en.wikipedia.org/wiki/Numerical_differentiation)
    '''
    # tuple => list, easier to handle
    args_f = list(args_f)

    # operational arguments
    args_f_m = list(args_f)
    args_f_p = list(args_f)
    kwargs_f_m = dict(kwargs_f)
    kwargs_f_p = dict(kwargs_f)

    args_name_list = list(f.__code__.co_varnames)

    # var_in is an int
    if type(var_in) is int:
        var_ind = var_in
        var_name = args_name_list[var_ind]
    # var_in is a str
    else:
        var_name = var_in
        try:
            var_ind = args_name_list.index(var_name)
        except ValueError:
            print(args_name_list)
            raise Exception('wrong var_in name (not in args name list)')

    if var_ind < len(args_f):
        x = args_f[var_ind]
    else:
        x = kwargs_f[var_name]

    if h == 0:
        h = x * np.sqrt(np.finfo(float).eps)
        if h == 0:
            print('WARN : h == 0 ! setting @ 10**-6 ')
            h = 10**-6

    if var_ind < len(args_f):
        args_f_m[var_ind] = x - h
        args_f_p[var_ind] = x + h
    else:
        kwargs_f_m[var_name] = x - h
        kwargs_f_p[var_name] = x + h

    m = f(*args_f_m, **kwargs_f_m)
    p = f(*args_f_p, **kwargs_f_p)

    if utils.is_iterable(m):
        m = m[var_out]
        p = p[var_out]

    dout = (p - m) / (2. * float(h))

    return dout
def partial_derive(f, var_in, var_out=0, kwargs_f={}, args_f=[], h=0, accur=-1):
    """
    This function computes the partial derivative of a python function

    Parameters
    ----------
    f : Python function
        the python function which will be derivated.
        this function must return a scalar.
        the parameters of f susceptible to be derivated must be scalars,
        i.e. if for instance you want to derivate a position vector X = [x,y,z],
        f must take as arguments f(x,y,z) and not f(X)
    var_in : int or string
        the derivation is with respect to this variable
        can be an int (starts with 0) or a string describing the name of
        the var in f's arguments.
    var_out : int
        the output of f which needs to be considered as the output
        ** must be an int **
        The default is 0.
    kwargs_f : dict, optional
        dictionary describing the arguments of f. The default is {}.
    args_f : iterable, optional
        tuple/list describing the arguments of f. The default is [].
    h : float, optional
        derivation step, if h == 0 use x * sqrt(epsilon)
        (source : http://en.wikipedia.org/wiki/Numerical_differentiation).
    accur : int, optional
        accuracy coefficient index. -1 is the best but the slowest.
        The default is -1.
        https://en.wikipedia.org/wiki/Finite_difference_coefficient

    Returns
    -------
    dout : float
        the derivative of f w.r.t. var_in.
    """
    # tuple => list, easier to handle
    args_f = list(args_f)

    # operational arguments
    args_f_i = list(args_f)
    kwargs_f_i = dict(kwargs_f)

    args_name_list = list(f.__code__.co_varnames)

    # var_in is an int
    if type(var_in) is int:
        var_ind = var_in
        var_name = args_name_list[var_ind]
    # var_in is a str
    else:
        var_name = var_in
        try:
            var_ind = args_name_list.index(var_name)
        except ValueError:
            print(args_name_list)
            raise Exception('wrong var_in name (not in args name list)')

    if var_ind < len(args_f):
        x = args_f[var_ind]
    else:
        x = kwargs_f[var_name]

    if utils.is_iterable(x):
        print("ERR: partial_derive: var_in is not a scalar")
        raise Exception

    if h == 0:
        h = x * np.sqrt(np.finfo(float).eps)
        if h == 0:
            print('WARN : h == 0 ! setting @ 10**-6 ')
            h = 10**-6

    res_stk = []
    accur_coeff = get_accur_coeff(accur)
    for i, k in enumerate(accur_coeff):
        ### if the coeff is null, no computation
        if k == 0:
            res_stk.append(0.)
        else:
            if var_ind < len(args_f):
                args_f_i[var_ind] = x + h * float(i - 4)
            else:
                kwargs_f_i[var_name] = x + h * float(i - 4)
            ### i-4 because i=0 is actually the index -4
            ### cf the Wikipedia table
            ### https://en.wikipedia.org/wiki/Finite_difference_coefficient

            res = f(*args_f_i, **kwargs_f_i)

            if utils.is_iterable(res):
                res = res[var_out]

            res_stk.append(res)

    dout = np.dot(np.array(res_stk), accur_coeff) / h

    return dout
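# Usage sketch: numerical derivative of a simple two-argument function w.r.t. its
# second argument (the lambda below is purely illustrative).
# >>> f = lambda x, y: x * y**2
# >>> dfdy = partial_derive(f, 'y', args_f=[2., 3.])   # analytical value is 2*x*y = 12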
def ESMGFZ_extrapolator(path_or_netcdf_object_in,
                        time_xtrp, lat_xtrp, lon_xtrp,
                        wished_values=("duV", "duNS", "duEW"),
                        output_type="DataFrame",
                        debug=False, verbose=True,
                        time_smart=True,
                        interp_method="splinef2d"):
    """
    Extrapolate loading values from the ESMGFZ models
    esmdata.gfz-potsdam.de:8080/

    Parameters
    ----------
    path_or_netcdf_object_in : string, list of strings or NetCDF object
        Input can be a file path (string), a list of strings
        (will be concatenated) or directly the NetCDF object (faster).
    time_xtrp : float or float iterable
        time for the wished extrapolated values
        for daily files: hours of day [0..23].
        for yearly files: day of year [0..364].
    lat_xtrp : float or float iterable
        latitude component for the wished extrapolated values
        ranging from [-90..90]
    lon_xtrp : float or float iterable
        longitude component for the wished extrapolated values
        ranging from [-180..180]
    wished_values : tuple of string, optional
        the components of the extrapolated values.
        The default is ("duV","duNS","duEW").
    output_type : str, optional
        Choose the output type.
        "DataFrame","dict","array","tuple","list"
        The default is "DataFrame".
    debug : bool, optional
        returns the NetCDF object for debug purposes

    Returns
    -------
    Points_out : see output_type
        The extrapolated values.
    """
    if not utils.is_iterable(time_xtrp):
        time_xtrp = [time_xtrp]
    if not utils.is_iterable(lat_xtrp):
        lat_xtrp = [lat_xtrp]
    if not utils.is_iterable(lon_xtrp):
        lon_xtrp = [lon_xtrp]

    Points_xtrp = (time_xtrp, lat_xtrp, lon_xtrp)

    if type(path_or_netcdf_object_in) is str:
        NC = nc.Dataset(path_or_netcdf_object_in)
    elif type(path_or_netcdf_object_in) in (nc.Dataset, nc.MFDataset):
        NC = path_or_netcdf_object_in
    else:
        NC = nc.MFDataset(sorted(path_or_netcdf_object_in))

    if debug:
        return NC

    time_orig = np.array(NC['time'][:])

    if time_smart:
        # we work in MJD
        start_date = conv.dt2MJD(conv.str_date2dt(NC['time'].units[11:]))
        if len(time_orig) <= 366:
            time = start_date - .0 + time_orig
        else:
            time = start_date - .0 + np.array(range(len(time_orig)))

    lat = np.flip(np.array(NC['lat'][:]))  ### we flip the lat because it is not ascending!
    lon = np.array(NC['lon'][:])

    Points = (time, lat, lon)

    WishVals_Stk = []
    WishVals_dic = dict()

    #### prepare dedicated time, lat, lon columns
    Points_xtrp_array = np.array(list(itertools.product(*Points_xtrp)))
    WishVals_dic['time'] = Points_xtrp_array[:, 0]
    WishVals_dic['lat'] = Points_xtrp_array[:, 1]
    WishVals_dic['lon'] = Points_xtrp_array[:, 2]

    ### do the interpolation for the wished values
    for wishval in wished_values:
        if verbose:
            print("INFO:", wishval, "start interpolation at", dt.datetime.now())

        #### Val = np.array(NC[wishval])  ### Slow
        Val = NC[wishval][:]

        if verbose:
            print("INFO:", wishval, "grid loaded at", dt.datetime.now())

        Val = np.flip(Val, 1)  ### we flip the lat because it is not ascending!

        Val_xtrp = scipy.interpolate.interpn(Points, Val, Points_xtrp,
                                             method=interp_method)

        WishVals_Stk.append(Val_xtrp)
        WishVals_dic[wishval] = Val_xtrp

    #### choose the output
    if output_type == "DataFrame":
        Points_out = pd.DataFrame(WishVals_dic)
        if time_smart:
            Points_out['time_dt'] = conv.MJD2dt(Points_out['time'])
    elif output_type == "dict":
        Points_out = WishVals_dic
    elif output_type == "array":
        Points_out = np.column_stack(WishVals_Stk)
    elif output_type == "tuple":
        Points_out = tuple(WishVals_Stk)
    elif output_type == "list":
        Points_out = list(WishVals_Stk)
    else:
        Points_out = WishVals_Stk

    return Points_out
def track_runner(rnx_rover, rnx_base, working_dir, experience_prefix,
                 XYZbase=[], XYZrover=[], outtype='XYZ', mode='short',
                 interval=None, antmodfile="~/gg/tables/antmod.dat",
                 calc_center='igs', forced_sp3_path='',
                 const="G", silent=False, rinex_full_path=False,
                 run_on_gfz_cluster=False, forced_iono_path=''):

    # paths & files
    working_dir = utils.create_dir(working_dir)
    temp_dir = utils.create_dir(os.path.join(working_dir, 'TEMP'))
    out_dir = utils.create_dir(os.path.join(working_dir, 'OUTPUT'))

    if operational.check_if_compressed_rinex(rnx_rover):
        rnx_rover = operational.crz2rnx(rnx_rover, temp_dir)
    else:
        shutil.copy(rnx_rover, temp_dir)

    if operational.check_if_compressed_rinex(rnx_base):
        rnx_base = operational.crz2rnx(rnx_base, temp_dir)
    else:
        shutil.copy(rnx_base, temp_dir)

    # RINEX START & END
    rov_srt, rov_end, rov_itv = operational.rinex_start_end(rnx_rover, 1)
    bas_srt, bas_end, bas_itv = operational.rinex_start_end(rnx_base, 1)

    # RINEX NAMES
    rov_name = os.path.basename(rnx_rover)[0:4]
    bas_name = os.path.basename(rnx_base)[0:4]

    rov_name_uper = rov_name.upper()
    bas_name_uper = bas_name.upper()

    srt_str = rov_srt.strftime("%Y_%j")
    exp_full_name = '_'.join((experience_prefix, rov_name, bas_name, srt_str))

    out_conf_fil = os.path.join(out_dir, exp_full_name + '.cmd')
    out_result_fil = os.path.join(out_dir, exp_full_name + '.out')

    print(out_conf_fil)

    confobj = open(out_conf_fil, 'w+')

    # Obs Files
    confobj.write(' obs_file' + '\n')
    ### just the basename, the number of characters is limited (20210415)
    if not rinex_full_path:
        confobj.write(' '.join((' ', bas_name_uper, os.path.basename(rnx_base), 'F')) + '\n')
        confobj.write(' '.join((' ', rov_name_uper, os.path.basename(rnx_rover), 'K')) + '\n')
    else:
        confobj.write(' '.join((' ', bas_name_uper, rnx_base, 'F')) + '\n')
        confobj.write(' '.join((' ', rov_name_uper, rnx_rover, 'K')) + '\n')
    confobj.write('\n')

    date = conv.rinexname2dt(os.path.basename(rnx_rover))

    # Nav File
    if forced_sp3_path == '':
        strt_rnd = dt.datetime(*bas_srt.timetuple()[:3])
        end_rnd = dt.datetime(*bas_end.timetuple()[:3])
        orblis = operational.multi_downloader_orbs_clks(temp_dir, strt_rnd, end_rnd,
                                                        archtype='/',
                                                        calc_center=calc_center)
        # sp3Z = orblis[0]
        sp3 = [utils.uncompress(sp3Z) for sp3Z in orblis]
        sp3 = [e if ".sp3" in e[-5:] else e + ".sp3" for e in sp3]
    else:
        if utils.is_iterable(forced_sp3_path):
            sp3 = forced_sp3_path
        else:
            sp3 = [forced_sp3_path]

    for sp3_mono in sp3:
        confobj.write(' '.join((' ', 'nav_file', sp3_mono, ' sp3')) + '\n')
    confobj.write('\n')

    # Iono file
    if forced_iono_path != '':
        confobj.write(' ionex_file ' + forced_iono_path + '\n')

    # Mode
    confobj.write(' mode ' + mode + '\n')
    confobj.write('\n')

    # Output
    confobj.write(' pos_root ' + exp_full_name + '.pos' + '\n')
    confobj.write(' res_root ' + exp_full_name + '.res' + '\n')
    confobj.write(' sum_file ' + exp_full_name + '.sum' + '\n')
    confobj.write('\n')

    # Outtype
    confobj.write(' out_type ' + outtype + '\n')
    confobj.write('\n')

    # Interval
    if not interval:
        confobj.write(' interval ' + str(rov_itv) + '\n')
    else:
        confobj.write(' interval ' + str(interval) + '\n')
    confobj.write('\n')

    # Coords
    bool_site_pos = False
    if XYZbase != []:
        if not bool_site_pos:
            confobj.write(' site_pos \n')
            bool_site_pos = True
        XYZbase = [str(e) for e in XYZbase]
        confobj.write(' '.join([' ', bas_name_uper] + XYZbase + ['\n']))
    if XYZrover != []:
        if not bool_site_pos:
            confobj.write(' site_pos \n')
            bool_site_pos = True
        XYZrover = [str(e) for e in XYZrover]
        confobj.write(' '.join([' ', rov_name_uper] + XYZrover + ['\n']))
    if bool_site_pos:
        confobj.write('\n')

    # Offsets
    confobj.write(' ante_off \n')

    Antobj_rov, Recobj_rov, Siteobj_rov, Locobj_rov = \
        files_rw.read_rinex_2_dataobjts(rnx_rover)

    confobj.write(' '.join([' ', rov_name_uper,
                            str(Antobj_rov.North_Ecc),
                            str(Antobj_rov.East_Ecc),
                            str(Antobj_rov.Up_Ecc),
                            Antobj_rov.Antenna_Type, '\n']))

    Antobj_bas, Recobj_bas, Siteobj_bas, Locobj_bas = \
        files_rw.read_rinex_2_dataobjts(rnx_base)

    confobj.write(' '.join([' ', bas_name_uper,
                            str(Antobj_bas.North_Ecc),
                            str(Antobj_bas.East_Ecc),
                            str(Antobj_bas.Up_Ecc),
                            Antobj_bas.Antenna_Type, '\n']))
    confobj.write('\n')

    # Site_stats
    confobj.write(' site_stats \n')
    confobj.write(' ' + bas_name_uper + " 0.1 0.1 0.1 0 0 0" + '\n')
    confobj.write(' ' + rov_name_uper + " 20 20 20 0.5 0.5 0.5" + '\n')
    confobj.write('\n')

    # Constellations
    confobj.write(" TR_GNSS " + const + '\n')

    # Misc
    # confobj.write(" USE_GPTGMF" + '\n')
    confobj.write(" ATM_MODELC GMF 0.5" + '\n')
    confobj.write(" ANTMOD_FILE " + antmodfile + '\n')
    confobj.write(" DCB_FILE " + "~/gg/incremental_updates/tables/dcb.dat.gnss" + '\n')

    confobj.write(" atm_stats" + '\n')
    confobj.write(' all 0.1 0.00030.00023' + '\n')

    confobj.close()
    # END OF FILE WRITING

    dowstring = ''.join([str(e) for e in conv.dt2gpstime(date)])
    bigcomand = ' '.join(("track -f", out_conf_fil, '-d', conv.dt2doy(date),
                          '-w', dowstring))

    if run_on_gfz_cluster:
        bigcomand = "cjob -c '" + bigcomand + "'"
        executable = "/bin/csh"
    else:
        executable = "/bin/bash"

    print('INFO : command launched :')
    print(bigcomand)

    # START OF PROCESSING
    if not silent:
        os.chdir(temp_dir)

        try:
            subprocess.call([bigcomand], executable=executable,
                            shell=True, timeout=60 * 20)
        except subprocess.TimeoutExpired:
            print("WARN: command timeout expired, skip")
            pass

        outfiles = []
        outfiles = outfiles + glob.glob(os.path.join(temp_dir, exp_full_name + '*sum*'))
        outfiles = outfiles + glob.glob(os.path.join(temp_dir, exp_full_name + '*pos*'))
        outfiles = outfiles + glob.glob(os.path.join(temp_dir, exp_full_name + '*cmd*'))

        Antobj_rov, Recobj_rov, Siteobj_rov, Locobj_rov = \
            files_rw.read_rinex_2_dataobjts(rnx_rover)

        [shutil.copy(e, out_dir) for e in outfiles]
        [os.remove(e) for e in outfiles]

        print("TRACK RUN FINISHED")
        print('results available in ', out_dir)
    else:
        print("Silent mode ON: nothing is launched")

    return bigcomand
def rotate_points(alphal, betal, gammal, pointlin, Rtype='R1',
                  xyzreftuple=([1, 0, 0], [0, 1, 0], [0, 0, 1]),
                  angtype='deg', fullout=False):
    '''
    R1 = Rz(g) * Ry(b) * Rx(a)
         if the RPY angles are given in the NED frame,
         the resulting positions are in the NED frame

    R2 = RPY2ENU matrix
         if the RPY angles are given in the NED frame,
         the resulting positions are IN THE ENU frame,
         no NED2ENU rotation needed
         Grewal et al. 2007

    Input :
        Angles, n = A lists of P * [ points ]

    Output :
        list of lists [ [ xA ] [ xA ] ... xP [ xA ] ]
    '''
    xaxis, yaxis, zaxis = xyzreftuple

    if not utils.is_iterable(alphal):
        alphal = np.array([alphal])
        betal = np.array([betal])
        gammal = np.array([gammal])
        boolnotiterable = True
    else:
        boolnotiterable = False

    pointlout = []
    R_out = []

    for pt in pointlin:
        if not utils.is_iterable(pt) or len(pt) != 3:
            print("ERR : rotate_points : pts != 3 coords")
            return 0

        pointltmp = []
        for a, b, g in zip(alphal, betal, gammal):
            R1 = rotmat3(a, b, g, angtype=angtype, xyzreftuple=xyzreftuple)
            R2 = conv.C_rpy2enu(a, b, g, angtype=angtype)
            if Rtype == 'R1':
                R = R1
            elif Rtype == 'R2':
                R = R2
            R_out.append(R)
            pointltmp.append(np.dot(R, pt))

        pointlout.append(pointltmp)

    if boolnotiterable:
        pointlout = pointltmp

    pointlout = np.array(pointlout)

    if fullout:
        return pointlout, R_out
    else:
        return pointlout