def multi_sumfiles_trend_extract(inp_dir, out_dir, out_prefix, raw_neu_dir='', specific_stats=(), invert_specific=False, style='epc'):
    """
    Extract the velocity trend of every station found in a directory of
    GINS sum files and write them all into a single velocity file.

    style = 'globk' OR 'epc' :
    make a GLOBK style .vel file, or make a dirty velocity file
    compatible with EPC.
    """
    utils.create_dir(out_dir)

    # one entry per station: station code -> list of the 3 E/N/U sum files
    statdico = sumfiles_to_statdico(inp_dir, specific_stats,
                                    invert_specific=invert_specific)

    stat_tuples = []
    for stat, enu_sumfiles in statdico.items():
        trend_tup = sumfiles_trend_extract(enu_sumfiles)
        # sanity check: the extracted tuple must refer to the same station
        if trend_tup[0] != stat:
            raise Exception('tup[0] != stat')
        stat_tuples.append(trend_tup)

    # deterministic output: sort by station code before writing
    stat_tuples.sort(key=lambda t: t[0])

    velfile_from_a_list_of_statVsV_tuple(stat_tuples, out_dir, out_prefix,
                                         raw_neu_dir, style)
def track_runner(rnx_rover,rnx_base,working_dir,experience_prefix,
                 XYZbase = [], XYZrover = [] , outtype = 'XYZ',mode = 'short',
                 interval=None,antmodfile = "~/gg/tables/antmod.dat",
                 calc_center='igs' , forced_sp3_path = '',
                 const="G",silent=False,rinex_full_path=False,
                 run_on_gfz_cluster=False,forced_iono_path=''):
    """
    Write a GAMIT/GLOBK ``track`` command file for a rover/base RINEX pair
    and (unless ``silent``) run track on it.

    Returns the track shell command string.

    NOTE(review): XYZbase/XYZrover mutable default args are rebound below,
    never mutated in place, so the shared-default pitfall does not bite here.
    """
    # paths & files
    working_dir = utils.create_dir(working_dir)
    temp_dir = utils.create_dir(os.path.join(working_dir,'TEMP'))
    out_dir = utils.create_dir(os.path.join(working_dir,'OUTPUT'))

    # uncompress (or just copy) the two RINEX into the temp dir
    if operational.check_if_compressed_rinex(rnx_rover):
        rnx_rover = operational.crz2rnx(rnx_rover,temp_dir)
    else:
        shutil.copy(rnx_rover,temp_dir)
    if operational.check_if_compressed_rinex(rnx_base):
        rnx_base = operational.crz2rnx(rnx_base,temp_dir)
    else:
        shutil.copy(rnx_base,temp_dir)

    # RINEX START & END
    rov_srt, rov_end , rov_itv = operational.rinex_start_end(rnx_rover,1)
    bas_srt, bas_end , bas_itv = operational.rinex_start_end(rnx_base,1)

    # RINEX NAMES (4-char site codes from the file names)
    rov_name = os.path.basename(rnx_rover)[0:4]
    bas_name = os.path.basename(rnx_base)[0:4]

    rov_name_uper = rov_name.upper()
    bas_name_uper = bas_name.upper()

    srt_str = rov_srt.strftime("%Y_%j")
    exp_full_name = '_'.join((experience_prefix,rov_name,bas_name,srt_str))

    out_conf_fil   = os.path.join(out_dir,exp_full_name + '.cmd')
    out_result_fil = os.path.join(out_dir,exp_full_name + '.out' )

    print(out_conf_fil)

    confobj = open(out_conf_fil,'w+')

    # Obs Files
    confobj.write(' obs_file' + '\n')
    ### just the basename, the caracter nb is limited (20210415)
    if not rinex_full_path:
        confobj.write(' '.join((' ',bas_name_uper,os.path.basename(rnx_base) ,'F'))+ '\n')
        confobj.write(' '.join((' ',rov_name_uper,os.path.basename(rnx_rover),'K'))+ '\n')
    else:
        confobj.write(' '.join((' ',bas_name_uper,rnx_base ,'F'))+ '\n')
        confobj.write(' '.join((' ',rov_name_uper,rnx_rover,'K'))+ '\n')
    confobj.write('\n')

    date = conv.rinexname2dt(os.path.basename(rnx_rover))

    # Nav File: download sp3 orbits unless a path is forced
    if forced_sp3_path == '':
        # round start/end to day boundaries for the orbit download
        strt_rnd = dt.datetime(*bas_srt.timetuple()[:3])
        end_rnd  = dt.datetime(*bas_end.timetuple()[:3])

        orblis = operational.multi_downloader_orbs_clks( temp_dir ,
                                                        strt_rnd , end_rnd ,
                                                        archtype='/',
                                                        calc_center = calc_center)
        #sp3Z = orblis[0]
        sp3 = [utils.uncompress(sp3Z) for sp3Z in orblis]
        # track expects a .sp3 extension; append it if uncompress dropped it
        sp3 = [e  if ".sp3" in e[-5:] else e + ".sp3" for e in sp3]
    else:
        if utils.is_iterable(forced_sp3_path):
            sp3 = forced_sp3_path
        else:
            sp3 = [forced_sp3_path]
    for sp3_mono in sp3:
        confobj.write(' '.join((' ','nav_file',sp3_mono ,' sp3'))+ '\n')
    confobj.write('\n')

    # Iono file (optional)
    if forced_iono_path != '':
        confobj.write(' ionex_file ' +  forced_iono_path  + '\n' )

    # Mode
    confobj.write(' mode ' +  mode + '\n')
    confobj.write('\n')

    # Output
    confobj.write(' pos_root ' + exp_full_name +'.pos' + '\n' )
    confobj.write(' res_root ' + exp_full_name +'.res' + '\n' )
    confobj.write(' sum_file ' + exp_full_name +'.sum' + '\n' )
    confobj.write('\n')

    # Outtype
    confobj.write(' out_type ' + outtype + '\n')
    confobj.write('\n')

    # Interval: default to the rover RINEX's own sampling rate
    if not interval:
        confobj.write(' interval ' + str(rov_itv) + '\n')
    else:
        confobj.write(' interval ' + str(interval) + '\n')
    confobj.write('\n')

    # Coords: write a site_pos section only if at least one XYZ is given
    bool_site_pos = False
    if XYZbase != []:
        if not bool_site_pos:
            confobj.write(' site_pos \n')
            bool_site_pos = True
        XYZbase = [str(e) for e in XYZbase]
        confobj.write(' '.join([' ', bas_name_uper] + XYZbase + ['\n']))
    if XYZrover != []:
        if not bool_site_pos:
            confobj.write(' site_pos \n')
            bool_site_pos = True
        XYZrover = [str(e) for e in XYZrover]
        confobj.write(' '.join([' ', rov_name_uper] + XYZrover + ['\n']))
    if bool_site_pos:
        confobj.write('\n')

    # Offsets: antenna eccentricities read from the RINEX headers
    confobj.write(' ante_off \n')

    Antobj_rov , Recobj_rov , Siteobj_rov , Locobj_rov = \
        files_rw.read_rinex_2_dataobjts(rnx_rover)

    confobj.write(' '.join([' ', rov_name_uper ,
                            str(Antobj_rov.North_Ecc) ,
                            str(Antobj_rov.East_Ecc) ,
                            str(Antobj_rov.Up_Ecc) ,
                            Antobj_rov.Antenna_Type , '\n']))

    Antobj_bas , Recobj_bas , Siteobj_bas , Locobj_bas = \
        files_rw.read_rinex_2_dataobjts(rnx_base)

    confobj.write(' '.join([' ', bas_name_uper ,
                            str(Antobj_bas.North_Ecc) ,
                            str(Antobj_bas.East_Ecc) ,
                            str(Antobj_bas.Up_Ecc) ,
                            Antobj_bas.Antenna_Type , '\n']))
    confobj.write('\n')

    # Site_stats: tight a-priori sigmas for the fixed base, loose for rover
    confobj.write(' site_stats \n')
    confobj.write(' ' + bas_name_uper + " 0.1 0.1 0.1 0 0 0" + '\n')
    confobj.write(' ' + rov_name_uper + " 20 20 20 0.5 0.5 0.5" + '\n')
    confobj.write('\n')

    # constellqtions
    confobj.write(" TR_GNSS " + const + '\n')

    # Misc
    #confobj.write(" USE_GPTGMF" + '\n')
    confobj.write(" ATM_MODELC GMF 0.5" + '\n')
    confobj.write(" ANTMOD_FILE " + antmodfile + '\n')
    confobj.write(" DCB_FILE "  + "~/gg/incremental_updates/tables/dcb.dat.gnss" + '\n')

    confobj.write(" atm_stats" + '\n')
    # NOTE(review): "0.00030.00023" looks like two fused values
    # ("0.0003 0.00023") — confirm against the track atm_stats syntax
    confobj.write('  all 0.1 0.00030.00023' + '\n')

    confobj.close()
    #END OF FILE WRITING

    dowstring = ''.join([str(e) for e in conv.dt2gpstime(date)])
    bigcomand = ' '.join(("track -f" ,  out_conf_fil , '-d' , conv.dt2doy(date) ,'-w', dowstring))

    if run_on_gfz_cluster:
        bigcomand = "cjob -c '" + bigcomand + "'"
        executable="/bin/csh"
    else:
        executable="/bin/bash"

    print('INFO : command launched :')
    print(bigcomand)

    # START OF PROCESSING
    if not silent:
        os.chdir(temp_dir)
        try:
            subprocess.call([bigcomand], executable=executable, shell=True,timeout=60*20)
        except subprocess.TimeoutExpired:
            print("WARN: command timeout expired, skip")
            pass

        # collect the result files track left in the temp dir
        outfiles = []
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*sum*'))
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*pos*'))
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*cmd*'))

        Antobj_rov , Recobj_rov , Siteobj_rov , Locobj_rov = \
            files_rw.read_rinex_2_dataobjts(rnx_rover)

        # move results to the OUTPUT dir
        [shutil.copy(e,out_dir) for e in outfiles]
        [os.remove(e) for e in outfiles]

        print("TRACK RUN FINISHED")
        print('results available in ' , out_dir)
    else:
        print("Silent mode ON: nothing is launched")

    return bigcomand
def rtklib_run_from_rinex(rnx_rover,rnx_base,generik_conf,working_dir,
                          experience_prefix="",rover_auto_conf=False,
                          base_auto_conf=True,XYZbase=[0,0,0],outtype = 'auto',
                          calc_center='igs'):
    """
    Run RTKLIB's rnx2rtkp on a rover/base RINEX pair, building the config
    file from a generic one.

    auto_conf : read the header of the rinex and write the conf. file
    according to it if the mode is disabled, the antenna/rec part of the
    conf. file will be the same as the generic one

    NB : RTKLIB "core" have it's own reading header option. Thus, my advice
    is to disable the auto mode for the rover and leave
    ``ant1-postype=rinexhead`` in the generic conf file and enable it for
    the base with a XYZbase vector initialized or the good XYZ in header
    on the rinex (the XYZbase vector is prioritary over the base RINEX
    header)

    (but in anycase, prepars good rinex's headers ;) )

    outtype : 'auto' (as defined in the generic config file)
    or 'dms' 'deg' 'xyz' 'enu'
    can manage the upper case XYZ or FLH
    """
    # paths & files
    working_dir = utils.create_dir(working_dir)
    # timestamped temp dir: every run gets its own scratch space
    temp_dir = utils.create_dir(os.path.join(working_dir,'TEMP_' + utils.get_timestamp()))
    out_dir = utils.create_dir(os.path.join(working_dir,'OUTPUT'))

    # uncompressing rinex if compressed
    if operational.check_if_compressed_rinex(rnx_rover):
        rnx_rover = operational.crz2rnx(rnx_rover,temp_dir)
    if operational.check_if_compressed_rinex(rnx_base):
        rnx_base = operational.crz2rnx(rnx_base,temp_dir)

    # RINEX START & END
    rov_srt, rov_end , rov_itv = operational.rinex_start_end(rnx_rover,1)
    bas_srt, bas_end , bas_itv = operational.rinex_start_end(rnx_base,1)

    # RINEX NAMES (4-char site codes from the file names)
    rov_name = os.path.basename(rnx_rover)[0:4]
    bas_name = os.path.basename(rnx_base)[0:4]

    # paths & files
    #temp_dir = os.path.join(working_dir,'TEMP')
    #if clean_temp_dir:
    #    shutil.rmtree(temp_dir)
    #    temp_dir = os.path.join(working_dir,'TEMP')
    #out_dir = os.path.join(working_dir,'OUTPUT')

    srt_str = rov_srt.strftime("%Y_%j")
    exp_full_name = '_'.join((experience_prefix,rov_name,bas_name,srt_str))

    out_conf_fil   = os.path.join(out_dir,exp_full_name + '.conf')
    out_result_fil = os.path.join(out_dir,exp_full_name + '.out' )

    # start from the generic config, then override rover/base entries
    dicoconf = operational.read_conf_file(generik_conf)

    if rover_auto_conf:
        Antobj_rov , Recobj_rov , Siteobj_rov , Locobj_rov = \
            files_rw.read_rinex_2_dataobjts(rnx_rover)
        dicoconf['ant1-postype'] ='xyz'
        dicoconf['ant1-anttype'] = Antobj_rov.Antenna_Type
        dicoconf['ant1-pos1'] = Locobj_rov.X_coordinate_m
        dicoconf['ant1-pos2'] = Locobj_rov.Y_coordinate_m
        dicoconf['ant1-pos3'] = Locobj_rov.Z_coordinate_m
        dicoconf['ant1-antdelu'] = Antobj_rov.Up_Ecc
        dicoconf['ant1-antdeln'] = Antobj_rov.North_Ecc
        dicoconf['ant1-antdele'] = Antobj_rov.East_Ecc

    if not outtype.lower() == 'auto':
        dicoconf['out-solformat'] = outtype.lower()
    print('out-solformat' , dicoconf['out-solformat'])

    if base_auto_conf:
        Antobj_bas , Recobj_bas , Siteobj_bas , Locobj_bas = \
            files_rw.read_rinex_2_dataobjts(rnx_base)
        dicoconf['ant2-postype'] ='xyz'
        dicoconf['ant2-anttype'] = Antobj_bas.Antenna_Type
        # explicit XYZbase vector has priority over the RINEX header coords
        if XYZbase[0] != 0:
            dicoconf['ant2-pos1'] = XYZbase[0]
            dicoconf['ant2-pos2'] = XYZbase[1]
            dicoconf['ant2-pos3'] = XYZbase[2]
        else:
            dicoconf['ant2-pos1'] = Locobj_bas.X_coordinate_m
            dicoconf['ant2-pos2'] = Locobj_bas.Y_coordinate_m
            dicoconf['ant2-pos3'] = Locobj_bas.Z_coordinate_m
        dicoconf['ant2-antdelu'] = Antobj_bas.Up_Ecc
        dicoconf['ant2-antdeln'] = Antobj_bas.North_Ecc
        dicoconf['ant2-antdele'] = Antobj_bas.East_Ecc

    # warn (but don't abort) if the rover window is not inside the base window
    if not (bas_srt <= rov_srt <= rov_end <= bas_end):
        print('WARN : not bas_srt <= rov_srt <= rov_end <= bas_end !!!')

    outconffilobj = open(out_conf_fil,'w+')
    for k,v in dicoconf.items():
        lin = k.ljust(20)+'='+str(v)+'\n'
        outconffilobj.write(lin)
    outconffilobj.close()

    # ORBITS
    # SP3
    orblis = operational.multi_downloader_orbs_clks( temp_dir ,
                                                    bas_srt , bas_end ,
                                                    archtype='/',
                                                    calc_center = calc_center)
    sp3Z = orblis[0]
    sp3 = utils.uncompress(sp3Z)

    # BRDC (broadcast navigation file)
    statdic = dict()
    statdic['nav'] = ['BRDC']
    # round the nav start to the day boundary
    nav_srt = dt.datetime(bas_srt.year, bas_srt.month , bas_srt.day )
    orblis = operational.multi_downloader_rinex(statdic,temp_dir ,
                                                nav_srt , bas_end ,
                                                archtype='/',
                                                sorted_mode=0)
    navZ = orblis[0]
    nav = utils.uncompress(navZ)

    # Command
    com_config = "-k " + out_conf_fil
    com_interval="-ti " + str(rov_itv)
    com_mode = ""
    #com_mode="-p 4"
    com_resultfile="-o " + out_result_fil
    #com_combinsol="-c"

    exe_path = "rnx2rtkp"
    # exe_path = "/home/pierre/install_softs/RTKLIB/rnx2rtkp"

    bigcomand = ' '.join((exe_path,com_config,com_interval,com_mode,
                          com_resultfile,rnx_rover,rnx_base,nav,sp3))

    print(bigcomand)

    subprocess.call([bigcomand], executable='/bin/bash', shell=True)
    print("RTKLIB RUN FINISHED")
    return None
def sp3_overlap_creator(ac_list, dir_in, dir_out, suffix_out_input=None,
                        overlap_size=7200, force=False,
                        manage_missing_sats='common_sats_only',
                        eliminate_null_sat=True, severe=False,
                        separated_systems_export=False, first_date=None):
    """
    Generate an SP3 Orbit file with overlap based on the SP3s of the
    days before and after

    Parameters
    ----------
    ac_list : list
        3-character codes of the ACs.
    dir_in : str
        where the input sp3 are.
    dir_out : str
        where the output sp3 will be outputed.
    suffix_out_input : str, optional
        last char of the 3-char. code. if None, then it is the same as input.
    overlap_size : int, optional
        Overlapsize. The default is 7200.
    force : True, optional
        force overwrite. The default is False.
    manage_missing_sats : str, optional
        'exclude' (or its legacy alias 'common_sats_only', the default) :
        generate a file with only the common sat between the 3 days.
        Thus, exclude the missing sats
        'extrapolate' : extrapolate the missing sats based on the
        first/last epoch
    eliminate_null_sat : bool, optional
        eliminate null sat. The default is True.
    severe : bool, optional
        raise an exception if problem. The default is False.
    separated_systems_export : bool, optional
        export different sp3 for different system. The default is False.
    first_date : datetime, optional
        exclude SP3 before this epoch

    Returns
    -------
    None.

    Note
    ----
    start/end date are not implemented
    the force option skips existing files
    """
    Dict_Lfiles_ac = dict()

    for ac in ac_list:
        Dict_Lfiles_ac[ac] = []
        Lfile = Dict_Lfiles_ac[ac]

        Extlist = ["sp3", "SP3", "sp3.gz", "SP3.gz"]
        for ext in Extlist:
            Lfile = Lfile + utils.find_recursive(dir_in, "*" + ac + "*" + ext)

        print("Nb of SP3 found for", ac, len(Lfile))

        if not suffix_out_input:
            suffix_out = ac
        else:
            suffix_out = ac[:2] + suffix_out_input

        D = []
        WWWWD = []

        for sp3 in Lfile:
            #wwwwd_str = os.path.basename(sp3)[3:8]
            #D.append(conv.gpstime2dt(int(wwwwd_str[:4]),int(wwwwd_str[4:])))
            dat = conv.sp3name2dt(sp3)
            D.append(dat)

        # first/last days are skipped: they have no neighbor on one side
        for dat in D[1:-1]:  ####if selection manuel, zip > 2lists !!!
            try:
                print("***********", ac, dat)

                if first_date and dat < first_date:
                    print("INFO: SKIP date", dat)
                    continue

                wwwwd_str = conv.dt_2_sp3_datestr(dat).zfill(5)

                dat_bef = dat - dt.timedelta(days=1)
                dat_aft = dat + dt.timedelta(days=1)

                wwwwd_str_bef = utils.join_improved(
                    "", *conv.dt2gpstime(dat_bef)).zfill(5)
                wwwwd_str_aft = utils.join_improved(
                    "", *conv.dt2gpstime(dat_aft)).zfill(5)

                ###### check if the output already exists
                dir_out_wk = os.path.join(dir_out, "wk" + str(wwwwd_str)[:4])
                utils.create_dir(dir_out_wk)
                fil_out = dir_out_wk + "/" + suffix_out + wwwwd_str + ".sp3"

                if not force and os.path.isfile(fil_out):
                    print("0))", fil_out, "exists, skipping...")
                    continue

                ### *************** STEP 1 ***************
                print("1)) Search for the days before/after")
                print("1))", dat_bef, dat_aft)

                p1 = utils.find_regex_in_list(wwwwd_str + ".sp3", Lfile, True)
                p_bef = utils.find_regex_in_list(wwwwd_str_bef + ".sp3",
                                                 Lfile, True)
                p_aft = utils.find_regex_in_list(wwwwd_str_aft + ".sp3",
                                                 Lfile, True)

                print("1)) Files found for the days before/after")
                print("0b)", p_bef)
                print("01)", p1)
                print("0a)", p_aft)

                if not p1 or not p_bef or not p_aft:
                    print("ERROR with day", dat)
                    continue

                SP3 = files_rw.read_sp3(p1)
                SP3_bef = files_rw.read_sp3(p_bef)
                SP3_aft = files_rw.read_sp3(p_aft)

                ### Filtering to keep P (position) records only
                SP3 = SP3[SP3.type == "P"]
                SP3_bef = SP3_bef[SP3_bef.type == "P"]
                SP3_aft = SP3_aft[SP3_aft.type == "P"]

                # keep only epochs strictly outside the central day
                SP3_bef = SP3_bef[SP3_bef["epoch"] < SP3["epoch"].min()]
                SP3_aft = SP3_aft[SP3_aft["epoch"] > SP3["epoch"].max()]

                SP3concat = pd.concat((SP3_bef, SP3, SP3_aft))

                dat_filter_bef = dat - dt.timedelta(seconds=overlap_size)
                dat_filter_aft = dat + dt.timedelta(
                    seconds=overlap_size) + dt.timedelta(days=1)

                ### *************** STEP 2 ***************
                print("2)) dates of the overlap period before/after")
                print("2))", dat_filter_bef, dat_filter_aft)

                ### *************** STEP 3 ***************
                print("3)) Dates of: SP3 concatenated, before, current, after")
                print("3))", SP3concat["epoch"].min(),
                      SP3concat["epoch"].max())
                print("3b)", SP3_bef["epoch"].min(), SP3_bef["epoch"].max())
                print("31)", SP3["epoch"].min(), SP3["epoch"].max())
                print("3a)", SP3_aft["epoch"].min(), SP3_aft["epoch"].max())

                SP3concat = SP3concat[(SP3concat["epoch"] >= dat_filter_bef)
                                      & (SP3concat["epoch"] <= dat_filter_aft)]

                ########## HERE WE MANAGE THE MISSING SATS
                # 'common_sats_only' (the default) is the legacy alias of
                # 'exclude' — before this fix the default value fell through
                # to the error branch, so every default call raised.
                if manage_missing_sats in ("exclude", "common_sats_only"):
                    print("4))", "remove missing sats ")
                    common_sats = set(SP3_bef["sat"]).intersection(
                        set(SP3["sat"])).intersection(set(SP3_aft["sat"]))
                    SP3concat = SP3concat[SP3concat["sat"].isin(common_sats)]
                elif manage_missing_sats == "extrapolate":
                    print("4))", "extrapolate missing sats ")
                    for iovl, SP3_ovl in enumerate((SP3_bef, SP3_aft)):
                        if iovl == 0:
                            backward = True
                            forward = False
                            backfor = "backward"
                        elif iovl == 1:
                            backward = False
                            forward = True
                            backfor = "forward"

                        Sats = set(SP3["sat"])
                        Sats_ovl = set(SP3_ovl["sat"])

                        Sats_miss = Sats.difference(Sats_ovl)
                        if not Sats_miss:
                            continue
                        print("4a)", "extrapolate missing sats",
                              backfor, Sats_miss)

                        SP3extrapo_in = SP3concat[SP3concat["sat"].isin(
                            Sats_miss)]

                        #step = utils.most_common(SP3concat["epoch"].diff().dropna())
                        #step = step.astype('timedelta64[s]').astype(np.int32)
                        # NOTE(review): hard-coded 900 s step — presumably the
                        # nominal SP3 sampling; confirm for 300 s products
                        step = 900
                        #print(step)

                        #print("SP3extrapo_in",SP3extrapo_in)

                        SP3extrapo = reffram.extrapolate_sp3_DataFrame(
                            SP3extrapo_in,
                            step=step,
                            n_step=int(overlap_size / step),
                            backward=backward,
                            forward=forward,
                            until_backward=dat_filter_bef,
                            until_forward=dat_filter_aft,
                            return_all=False)

                        SP3concat = pd.concat((SP3concat, SP3extrapo))
                        print(SP3extrapo)
                else:
                    print("ERR: check manage_missing_sats value")
                    raise Exception("bad manage_missing_sats value: "
                                    + str(manage_missing_sats))

                if eliminate_null_sat:
                    GoodSats = []
                    for sat in SP3concat["sat"].unique():
                        XYZvals = SP3concat[SP3concat["sat"] == sat][[
                            "x", "y", "z"
                        ]].sum(axis=1)

                        # drop a sat when half or more of its positions are null
                        V = np.sum(np.isclose(XYZvals, 0)) / len(XYZvals)

                        if V < 0.50:
                            GoodSats.append(sat)
                        else:
                            print("6) eliminate because null position", sat)

                    SP3concat = SP3concat[SP3concat["sat"].isin(GoodSats)]

                ### *************** STEP 7 ***************
                print("7))", "Start/End Epoch of the concatenated file ")
                print("7))", SP3concat["epoch"].min(),
                      SP3concat["epoch"].max())

                #### All systems
                print("8)) outputed file")
                print(fil_out)
                write_sp3(SP3concat, fil_out)

                #### system separated
                # was hard-disabled with `if False:`; now honors the parameter
                # (default False keeps the previous behavior)
                if separated_systems_export:
                    for sys in SP3concat["const"].unique():
                        try:
                            SP3concat_sys = SP3concat[SP3concat["const"] ==
                                                      sys]
                            fil_out_sys = (dir_out_wk + "/" + suffix_out[:2]
                                           + sys.lower()
                                           + wwwwd_str.zfill(5) + ".sp3")
                            print("9)) outputed file")
                            print(fil_out_sys)
                            write_sp3(SP3concat_sys, fil_out_sys)
                        except Exception:
                            continue

            except KeyboardInterrupt:
                raise KeyboardInterrupt
            except Exception as e:
                if severe:
                    print("WARN:", e)
                    raise e
                else:
                    print("WARN: Error", e, "but no severe mode, continue...")
                    continue