def rinex_start_end(input_rinex_path, interval_out=False, add_tzinfo=False, verbose=True, safety_mode=True):
    """
    Return the first and the last epoch of a RINEX file.

    Epochs are read from the head and the tail of the file content
    (not from the header).

    Parameters
    ----------
    input_rinex_path : str
        path of the RINEX file, or directly the RINEX content as a string.
    interval_out : bool, optional
        if True, also return the nominal epoch interval. The default is False.
    add_tzinfo : bool, optional
        if True, attach a UTC timezone to the returned datetimes.
        The default is False.
    verbose : bool, optional
        verbose mode. The default is True.
    safety_mode : bool, optional
        if the epoch reading fails (e.g. in case of a compressed RINEX),
        activate a reading of the header and the file name as backup.
        The default is True.
        NOTE(review): this flag is currently never checked; the filename
        fallback is always used when no epoch can be read.

    Returns
    -------
    first_epoch, last_epoch : datetime.datetime
        first and last epochs of the file.
    interval : float
        most common epoch spacing in seconds
        (returned only if interval_out is True).
    """
    # read epochs near both ends of the file: min/max of the merged lists
    # give the first/last epochs without parsing the whole file
    epochs_list = []
    Head = utils.head(input_rinex_path, 1500)
    epochs_list_head = rinex_read_epoch(Head, interval_out=interval_out,
                                        add_tzinfo=add_tzinfo, out_array=False)
    Tail = utils.tail(input_rinex_path, 1500)
    epochs_list_tail = rinex_read_epoch(Tail, interval_out=interval_out,
                                        add_tzinfo=add_tzinfo, out_array=False)
    epochs_list = epochs_list_head + epochs_list_tail

    if len(epochs_list) == 0:
        # fallback: no epoch could be read (e.g. Hatanaka-compressed RINEX),
        # guess the session span from the RINEX file name instead
        first_epoch = conv.rinexname2dt(input_rinex_path)
        # a lowercase letter as 8th character of the short name denotes an
        # hourly session file; otherwise assume a daily file
        alphabet = list(string.ascii_lowercase)
        if os.path.basename(input_rinex_path)[7] in alphabet:
            last_epoch = first_epoch + dt.timedelta(hours=1)
        else:
            last_epoch = first_epoch + dt.timedelta(hours=24, seconds=-1)
    else:
        first_epoch = np.min(epochs_list)
        last_epoch = np.max(epochs_list)

    if add_tzinfo:
        first_epoch = first_epoch.replace(tzinfo=dateutil.tz.tzutc())
        last_epoch = last_epoch.replace(tzinfo=dateutil.tz.tzutc())

    if verbose:
        print("first & last epochs : ", first_epoch, last_epoch)

    if not interval_out:
        return first_epoch, last_epoch
    else:
        interv_lis = np.diff(epochs_list)
        # use total_seconds(): the previous "e.seconds + e.microseconds*1e-6"
        # dropped the days component and produced wrong values for negative
        # timedeltas (head/tail epochs are not guaranteed to be sorted)
        interv_lis = [e.total_seconds() for e in interv_lis]
        interval = utils.most_common(interv_lis)
        if verbose:  # this print previously ignored the verbose flag
            print("interval : ", interval, last_epoch)
        return first_epoch, last_epoch, interval
def rinex_timeline_datadico(inputlist_or_paths, use_rinex_lister=True, optional_info=''):
    """
    Build a RINEX data dictionary keyed by 4-character station name.

    Convention for the RINEX datadico:
    datadico[stat] = [(rinexname1,optional1,date1) ... (rinexnameN,optionalN,dateN)]

    Parameters
    ----------
    inputlist_or_paths : list or str
        a list of RINEX file names, or path(s) handed to rinex_lister
        (depending on use_rinex_lister).
    use_rinex_lister : bool, optional
        if True, pass inputlist_or_paths to rinex_lister to collect the
        files; if False, use inputlist_or_paths directly as the file list.
    optional_info : str, optional
        extra information stored in each tuple of the output dictionary.

    Returns
    -------
    datadico : dict
        station name -> list of (rinexname, optional_info, datetime) tuples.
    """
    if use_rinex_lister:
        filelist = rinex_lister(inputlist_or_paths)
    else:
        filelist = inputlist_or_paths

    # keep both short-name (RINEX 2) and long-name (RINEX 3) files
    rinexfilelist_old = [fil for fil in filelist if re.search(conv.rinex_regex(), fil)]
    rinexfilelist_new = [fil for fil in filelist if re.search(conv.rinex_regex_new_name(), fil)]
    rinexfilelist = rinexfilelist_old + rinexfilelist_new

    if not use_rinex_lister:
        rinexfilelist = [os.path.basename(e) for e in rinexfilelist]

    print('INFO : ', len(rinexfilelist), 'RINEXs will be ploted on the timeline')

    # station name = first 4 characters of the RINEX name
    statname_lis = sorted(list(set([rin[0:4] for rin in rinexfilelist])))

    print('INFO : ', len(statname_lis), 'stations will be ploted on the timeline')

    datadico = dict()

    for stat in statname_lis:
        datadico[stat] = []

    for rnx in rinexfilelist:
        try:
            datadico[rnx[0:4]].append((rnx, optional_info, conv.rinexname2dt(rnx)))
        except Exception:
            # narrowed from a bare "except:": keep the best-effort behavior
            # (skip unparsable names) without swallowing KeyboardInterrupt
            print('error with : ', rnx)

    return datadico
def rinex_check_epochs_availability(rinex_path_list):
    """
    Check the epoch availability of RINEX files using a teqc quality check.

    Parameters
    ----------
    rinex_path_list : list
        a list of RINEX paths.

    Returns
    -------
    T : str
        a text table with, per RINEX: name, date, available epochs,
        possible epochs and availability percentage.
    """
    results_stk = []

    for rinex_path in rinex_path_list:
        rinex_name = os.path.basename(rinex_path)

        QC = operational.teqc_qc(rinex_path)

        if not QC:
            # teqc failed for this file: skip it (best effort)
            continue

        epoc_all = int(utils.egrep_big_string("Poss. # of obs epochs", QC,
                                              only_first_occur=True).split()[-1])
        epoc_disp = int(utils.egrep_big_string("Epochs w/ observations", QC,
                                               only_first_occur=True).split()[-1])

        dt_rnx = conv.rinexname2dt(rinex_name)
        date_str = conv.dt2str(dt_rnx, "%F")

        # guard against a zero "possible epochs" count
        # (previously raised ZeroDivisionError)
        if epoc_all:
            percentage = (float(epoc_disp) / float(epoc_all)) * 100.
        else:
            percentage = 0.

        results = [rinex_name, date_str, epoc_disp, epoc_all, percentage]
        results_stk.append(results)

    header = ['RINEX', 'date', 'Avbl.', 'Poss.', '%']
    T = tabulate.tabulate(results_stk, headers=header)

    return T
def track_runner(rnx_rover,rnx_base,working_dir,experience_prefix,
                 XYZbase  = [], XYZrover = [] , outtype = 'XYZ',mode = 'short',
                 interval=None,antmodfile = "~/gg/tables/antmod.dat",
                 calc_center='igs' , forced_sp3_path = '',
                 const="G",silent=False,rinex_full_path=False,
                 run_on_gfz_cluster=False,forced_iono_path=''):
    """
    Write a TRACK configuration file for a rover/base RINEX pair and run
    the TRACK kinematic processing.

    Parameters
    ----------
    rnx_rover, rnx_base : str
        paths of the rover and base RINEX files (may be Hatanaka/compressed;
        they are uncompressed in a TEMP directory if needed).
    working_dir : str
        working directory; TEMP and OUTPUT subdirectories are created in it.
    experience_prefix : str
        prefix used to build the experiment name (with station names & date).
    XYZbase, XYZrover : list, optional
        a priori XYZ coordinates written as site_pos entries (skipped if
        empty). NOTE(review): mutable default argument — shared between
        calls; also mutated (stringified) in place below.
    outtype : str, optional
        TRACK out_type value. The default is 'XYZ'.
    mode : str, optional
        TRACK processing mode. The default is 'short'.
    interval : optional
        processing interval; if falsy, the rover RINEX interval is used.
    antmodfile : str, optional
        path of the antenna model file.
    calc_center : str, optional
        analysis center for the orbit download. The default is 'igs'.
    forced_sp3_path : str or iterable, optional
        if given, use this SP3 file (or these files) instead of downloading.
    const : str, optional
        constellation letter(s) for TR_GNSS. The default is "G".
    silent : bool, optional
        if True, only write the config file and return the command without
        launching it.
    rinex_full_path : bool, optional
        write full RINEX paths in the config file instead of basenames
        (TRACK limits the obs_file name length, cf. note of 2021-04-15).
    run_on_gfz_cluster : bool, optional
        wrap the command in "cjob" for the GFZ cluster (csh).
    forced_iono_path : str, optional
        if given, write it as ionex_file.

    Returns
    -------
    bigcomand : str
        the TRACK command line that was (or would be) launched.
    """
    # paths & files
    working_dir = utils.create_dir(working_dir)
    temp_dir = utils.create_dir(os.path.join(working_dir,'TEMP'))
    out_dir = utils.create_dir(os.path.join(working_dir,'OUTPUT'))

    # uncompress the RINEXs into TEMP if needed, else just copy them there
    if operational.check_if_compressed_rinex(rnx_rover):
        rnx_rover = operational.crz2rnx(rnx_rover,temp_dir)
    else:
        shutil.copy(rnx_rover,temp_dir)
    if operational.check_if_compressed_rinex(rnx_base):
        rnx_base = operational.crz2rnx(rnx_base,temp_dir)
    else:
        shutil.copy(rnx_base,temp_dir)

    # RINEX START & END (and intervals, via interval_out=1)
    rov_srt, rov_end , rov_itv = operational.rinex_start_end(rnx_rover,1)
    bas_srt, bas_end , bas_itv = operational.rinex_start_end(rnx_base,1)

    # RINEX NAMES (4-character station codes)
    rov_name = os.path.basename(rnx_rover)[0:4]
    bas_name = os.path.basename(rnx_base)[0:4]
    rov_name_uper = rov_name.upper()
    bas_name_uper = bas_name.upper()

    srt_str = rov_srt.strftime("%Y_%j")
    exp_full_name = '_'.join((experience_prefix,rov_name,bas_name,srt_str))

    out_conf_fil = os.path.join(out_dir,exp_full_name + '.cmd')
    out_result_fil = os.path.join(out_dir,exp_full_name + '.out' )

    print(out_conf_fil)

    confobj = open(out_conf_fil,'w+')

    # Obs Files
    confobj.write(' obs_file' + '\n')
    ### just the basename, the caracter nb is limited (20210415)
    if not rinex_full_path:
        confobj.write(' '.join((' ',bas_name_uper,os.path.basename(rnx_base) ,'F'))+ '\n')
        confobj.write(' '.join((' ',rov_name_uper,os.path.basename(rnx_rover),'K'))+ '\n')
    else:
        confobj.write(' '.join((' ',bas_name_uper,rnx_base ,'F'))+ '\n')
        confobj.write(' '.join((' ',rov_name_uper,rnx_rover,'K'))+ '\n')
    confobj.write('\n')

    date = conv.rinexname2dt(os.path.basename(rnx_rover))

    # Nav File: download the orbits unless an SP3 path is forced
    if forced_sp3_path == '':
        # round the base start/end down to midnight for the orbit download
        strt_rnd = dt.datetime(*bas_srt.timetuple()[:3])
        end_rnd = dt.datetime(*bas_end.timetuple()[:3])
        orblis = operational.multi_downloader_orbs_clks( temp_dir ,
                                                         strt_rnd , end_rnd ,
                                                         archtype='/',
                                                         calc_center = calc_center)
        #sp3Z = orblis[0]
        sp3 = [utils.uncompress(sp3Z) for sp3Z in orblis]
        # make sure every orbit file carries a .sp3 extension
        sp3 = [e if ".sp3" in e[-5:] else e + ".sp3" for e in sp3]
    else:
        if utils.is_iterable(forced_sp3_path):
            sp3 = forced_sp3_path
        else:
            sp3 = [forced_sp3_path]
    for sp3_mono in sp3:
        confobj.write(' '.join((' ','nav_file',sp3_mono ,' sp3'))+ '\n')
    confobj.write('\n')

    # Iono file (optional)
    if forced_iono_path != '':
        confobj.write(' ionex_file ' + forced_iono_path + '\n' )

    # Mode
    confobj.write(' mode ' + mode + '\n')
    confobj.write('\n')

    # Output
    confobj.write(' pos_root ' + exp_full_name +'.pos' + '\n' )
    confobj.write(' res_root ' + exp_full_name +'.res' + '\n' )
    confobj.write(' sum_file ' + exp_full_name +'.sum' + '\n' )
    confobj.write('\n')

    # Outtype
    confobj.write(' out_type ' + outtype + '\n')
    confobj.write('\n')

    # Interval: fall back on the rover RINEX interval if none is given
    if not interval:
        confobj.write(' interval ' + str(rov_itv) + '\n')
    else:
        confobj.write(' interval ' + str(interval) + '\n')
    confobj.write('\n')

    # Coords: write the site_pos header once, then one line per given site
    bool_site_pos = False
    if XYZbase != []:
        if not bool_site_pos:
            confobj.write(' site_pos \n')
            bool_site_pos = True
        XYZbase = [str(e) for e in XYZbase]
        confobj.write(' '.join([' ', bas_name_uper] + XYZbase + ['\n']))
    if XYZrover != []:
        if not bool_site_pos:
            confobj.write(' site_pos \n')
            bool_site_pos = True
        XYZrover = [str(e) for e in XYZrover]
        confobj.write(' '.join([' ', rov_name_uper] + XYZrover + ['\n']))
    if bool_site_pos:
        confobj.write('\n')

    # Offsets: antenna eccentricities read from the RINEX headers
    confobj.write(' ante_off \n')

    Antobj_rov , Recobj_rov , Siteobj_rov , Locobj_rov = \
    files_rw.read_rinex_2_dataobjts(rnx_rover)

    confobj.write(' '.join([' ', rov_name_uper ,
                            str(Antobj_rov.North_Ecc) ,
                            str(Antobj_rov.East_Ecc) ,
                            str(Antobj_rov.Up_Ecc) ,
                            Antobj_rov.Antenna_Type , '\n']))

    Antobj_bas , Recobj_bas , Siteobj_bas , Locobj_bas = \
    files_rw.read_rinex_2_dataobjts(rnx_base)

    confobj.write(' '.join([' ', bas_name_uper ,
                            str(Antobj_bas.North_Ecc) ,
                            str(Antobj_bas.East_Ecc) ,
                            str(Antobj_bas.Up_Ecc) ,
                            Antobj_bas.Antenna_Type , '\n']))
    confobj.write('\n')

    # Site_stats: loose constraints for the rover, tight for the base
    confobj.write(' site_stats \n')
    confobj.write(' ' + bas_name_uper + " 0.1 0.1 0.1 0 0 0" + '\n')
    confobj.write(' ' + rov_name_uper + " 20 20 20 0.5 0.5 0.5" + '\n')
    confobj.write('\n')

    # constellations
    confobj.write(" TR_GNSS " + const + '\n')

    # Misc
    #confobj.write(" USE_GPTGMF" + '\n')
    confobj.write(" ATM_MODELC GMF 0.5" + '\n')
    confobj.write(" ANTMOD_FILE " + antmodfile + '\n')
    confobj.write(" DCB_FILE " + "~/gg/incremental_updates/tables/dcb.dat.gnss" + '\n')

    confobj.write(" atm_stats" + '\n')
    # NOTE(review): "0.00030.00023" looks like two fused values
    # (0.0003 / 0.00023?) — kept as-is, verify against the TRACK manual
    confobj.write(' all 0.1 0.00030.00023' + '\n')

    confobj.close()
    #END OF FILE WRITING

    # build the TRACK command line (day-of-year + GPS week/dow string)
    dowstring = ''.join([str(e) for e in conv.dt2gpstime(date)])
    bigcomand = ' '.join(("track -f" , out_conf_fil , '-d' , conv.dt2doy(date) ,'-w', dowstring))

    if run_on_gfz_cluster:
        bigcomand = "cjob -c '" + bigcomand + "'"
        executable="/bin/csh"
    else:
        executable="/bin/bash"

    print('INFO : command launched :')
    print(bigcomand)

    # START OF PROCESSING
    if not silent:
        os.chdir(temp_dir)

        try:
            # cap the TRACK run at 20 minutes
            subprocess.call([bigcomand], executable=executable, shell=True,timeout=60*20)
        except subprocess.TimeoutExpired:
            print("WARN: command timeout expired, skip")
            pass

        # collect the result files produced in TEMP
        outfiles = []
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*sum*'))
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*pos*'))
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*cmd*'))

        # NOTE(review): this re-read of the rover RINEX is apparently
        # unused here — presumably a leftover; confirm before removing
        Antobj_rov , Recobj_rov , Siteobj_rov , Locobj_rov = \
        files_rw.read_rinex_2_dataobjts(rnx_rover)

        # move the results to OUTPUT
        [shutil.copy(e,out_dir) for e in outfiles]
        [os.remove(e) for e in outfiles]

        print("TRACK RUN FINISHED")
        print('results available in ' , out_dir)
    else:
        print("Silent mode ON: nothing is launched")

    return bigcomand
def rinex_start_end(input_rinex_path,interval_out=False,
                    add_tzinfo=False,verbose = True,
                    safety_mode = True):
    """
    Return the first and the last epoch of a RINEX file
    (based on the actual content of the file, not the header)
    Can handle RINEX 2 and 3

    Parameters
    ----------
    input_rinex_path : str
        path of the rinex file. can be the path of a RINEX
        or directly the RINEX content as a string
    interval_out : bool, optional
        output also the intervals. The default is False.
    add_tzinfo : bool, optional
        add timezone information in the datetime's Epoches.
        The default is False.
    verbose : bool, optional
        verbose mode. The default is True.
    safety_mode : bool, optional
        if the epoch reading fails (e.g. in case of a compressed RINEX)
        activate a reading of the header and the file name as backup.
        The default is True.
        NOTE(review): this flag is currently never checked; the filename
        fallback is always used when no epoch can be read.

    Returns
    -------
    first_epoch , last_epoch , [interval]
        First, last epoches and interval if asked.
    """
    # find min and max of a list of epochs from the beginning and
    # the end of the file
    epochs_list = []
    Head = utils.head(input_rinex_path,1500)
    epochs_list_head = rinex_read_epoch(Head,interval_out=interval_out,
                                        add_tzinfo=add_tzinfo,out_array=False)
    Tail = utils.tail(input_rinex_path,1500)
    epochs_list_tail = rinex_read_epoch(Tail,interval_out=interval_out,
                                        add_tzinfo=add_tzinfo,out_array=False)
    epochs_list = epochs_list_head + epochs_list_tail

    if len(epochs_list) == 0:
        # fallback: no epoch could be read (e.g. Hatanaka-compressed RINEX),
        # guess the session span from the RINEX file name instead
        first_epoch = conv.rinexname2dt(input_rinex_path)
        # a lowercase letter as 8th character of the short name denotes an
        # hourly session file; otherwise assume a daily file
        alphabet = list(string.ascii_lowercase)
        if os.path.basename(input_rinex_path)[7] in alphabet:
            last_epoch = first_epoch + dt.timedelta(hours=1)
        else:
            last_epoch = first_epoch + dt.timedelta(hours=24,seconds=-1)
    else:
        first_epoch = np.min(epochs_list)
        last_epoch = np.max(epochs_list)

    if add_tzinfo:
        first_epoch = first_epoch.replace(tzinfo=dateutil.tz.tzutc())
        last_epoch = last_epoch.replace(tzinfo=dateutil.tz.tzutc())

    if verbose:
        print("first & last epochs : " , first_epoch , last_epoch)

    if not interval_out:
        return first_epoch , last_epoch
    else:
        interv_lis = np.diff(epochs_list)
        # use total_seconds(): the previous "e.seconds + e.microseconds*1e-6"
        # dropped the days component and produced wrong values for negative
        # timedeltas (head/tail epochs are not guaranteed to be sorted)
        interv_lis = [e.total_seconds() for e in interv_lis]
        interval = utils.most_common(interv_lis)
        if verbose:  # this print previously ignored the verbose flag
            print("interval : " , interval , last_epoch)
        return first_epoch , last_epoch , interval