def write_ine_dummy_file(Sat_list, dt_in, extra_intrvl_strt=.1,
                         extra_intrvl_end=.4, step=300, out_file_path=None):
    """
    Write an EPOS INE dummy (empty values) file.

    Parameters
    ----------
    Sat_list : list
        Satellites for which an INE block is generated; each element is
        forwarded to ``ine_block_mono``.
    dt_in : datetime.datetime
        Epoch of the file; converted internally to an (integer) MJD.
    extra_intrvl_strt : float, optional
        Margin in days subtracted before the epoch's MJD. Default is .1.
    extra_intrvl_end : float, optional
        Margin in days added after the epoch's MJD (plus one day).
        Default is .4.
    step : int or float, optional
        Step size in seconds written in the header. Default is 300.
    out_file_path : str, optional
        If given, the generated content is also written to this path.

    Returns
    -------
    str
        The complete INE file content.
    """
    lines_stk = []

    # Whole-day MJD of the requested epoch, then the padded interval bounds.
    mjd = np.floor(conv.dt2MJD(dt_in))
    mjd_strt = mjd - extra_intrvl_strt
    mjd_end = mjd + extra_intrvl_end + 1

    # Generation timestamp stamped in the header's first line.
    datestr = conv.dt2str(dt.datetime.now(), str_format='%Y/%m/%d %H:%M:%S')

    # NOTE(review): the original source was whitespace-mangled; the exact
    # line layout / column alignment of this header template should be
    # confirmed against a real EPOS INE file.
    head_proto = """%=INE 1.00 {:} NEWSE=INE+ORBCOR
+global
day_info:
 epoch   : {:5} {:16.14f}
 interval: {:11.5f} {:11.5f}
 stepsize: {:6.2f}
-global
+initial_orbit
"""
    # The fractional part of the epoch is written as 0 (dummy file).
    head = head_proto.format(datestr, int(mjd), 0, mjd_strt, mjd_end, step)
    lines_stk.append(head)

    separator = "*" * 66 + "\n"
    for sat in Sat_list:
        lines_stk.append(separator)
        sat_str = ine_block_mono(sat, dt_in,
                                 extra_intrvl_strt, extra_intrvl_end, step)
        lines_stk.append(sat_str)
        lines_stk.append(separator)

    str_end = """-initial_orbit
%ENDINE
"""
    lines_stk.append(str_end)

    str_out = "".join(lines_stk)

    if out_file_path:
        # 'with' closes the file automatically; the original's extra
        # f.close() was redundant and has been dropped.
        with open(out_file_path, "w") as f:
            f.write(str_out)

    return str_out
def rinex_check_epochs_availability(rinex_path_list):
    """
    Check the epoch availability of RINEX files using a teqc quality check.

    Parameters
    ----------
    rinex_path_list : list of str
        Paths of the RINEX files to check.

    Returns
    -------
    str
        A plain-text table with one row per RINEX file: name, date,
        number of epochs with observations, number of possible epochs,
        and the availability percentage. Files for which the teqc QC
        fails are silently skipped (best effort).
    """
    results_stk = []

    for rinex_path in rinex_path_list:
        rinex_name = os.path.basename(rinex_path)

        QC = operational.teqc_qc(rinex_path)
        # teqc returned nothing usable for this file: skip it.
        if not QC:
            continue

        epoc_all = int(utils.egrep_big_string("Poss. # of obs epochs",
                                              QC,
                                              only_first_occur=True).split()[-1])
        epoc_disp = int(utils.egrep_big_string("Epochs w/ observations",
                                               QC,
                                               only_first_occur=True).split()[-1])

        dt_rnx = conv.rinexname2dt(rinex_name)
        date_str = conv.dt2str(dt_rnx, "%F")

        # Guard against a zero "possible epochs" count, which would raise
        # ZeroDivisionError. Python 3 division is already true division,
        # so the explicit float() casts of the original were dropped.
        percentage = (epoc_disp / epoc_all) * 100. if epoc_all else 0.

        results_stk.append([rinex_name, date_str,
                            epoc_disp, epoc_all, percentage])

    header = ['RINEX', 'date', 'Avbl.', 'Poss.', '%']
    T = tabulate.tabulate(results_stk, headers=header)

    return T
def ESMGFZ_downloader(latitude, longitude, output_dir,
                      components=("NTAL", "NTOL", "HYDL", "SLEL"),
                      CM_CF="CF",
                      outputformat="csv",
                      formatvariables="duNS,duEW,duV",
                      startdate=dt.datetime(2010, 1, 1),
                      enddate=dt.datetime(2020, 1, 1),
                      outfile_prefix=""):
    """
    Download loading contribution values for a specific point
    from ESM's website.

    http://rz-vm115.gfz-potsdam.de:8080/repository/entry/show?entryid=2827909c-6c9d-46fd-ba2c-f806bf215170&output=wiki.view

    Parameters
    ----------
    latitude : float
        latitude in degrees.
    longitude : float
        longitude in degrees.
    output_dir : str
        output directory for the downloaded files.
    components : iterable of str, optional
        the wished loading contributions. One file is downloaded per
        component. The default is ("NTAL", "NTOL", "HYDL", "SLEL").
        (A tuple default replaces the original mutable list default.)
    CM_CF : str, optional
        Center of Figure (CF) or Center of Mass (CM). The default is "CF".
    outputformat : str, optional
        output format. CSV is strongly recommended; 'netcdf' and
        'timeseries' are also supported. The default is "csv".
    formatvariables : str, optional
        comma-separated output variables. The default is "duNS,duEW,duV".
    startdate : datetime, optional
        start date. The default is dt.datetime(2010, 1, 1).
    enddate : datetime, optional
        end date. The default is dt.datetime(2020, 1, 1).
    outfile_prefix : str, optional
        a custom prefix for the output file names. The default is "".

    Returns
    -------
    output_path : str
        path of the LAST downloaded file (one file is written per
        requested component, but only the last path is returned —
        kept as-is for backward compatibility).
    """
    # Base repository entry on the ESM data server.
    url_base = ("http://esmdata.gfz-potsdam.de:8080/"
                + "repository/entry/show/Home/Elastic+Surface+Loading/")

    # The server splits its archive in two periods; pick the sub-entry
    # only when the whole request falls inside one of them.
    if startdate >= dt.datetime(2010, 1, 1) and enddate >= dt.datetime(2010, 1, 1):
        url_period = '/2010-now'
    elif startdate < dt.datetime(2010, 1, 1) and enddate < dt.datetime(2010, 1, 1):
        url_period = '/2000-2009'
    else:
        url_period = ''

    output_path = None  # stays None if components is empty

    for icomp in components:
        url_entry = (icomp + "/" + CM_CF + "/2000-now" + url_period
                     + "?submit=Get%20Point&output=data.gridaspoint")

        ### point
        url_lat = "&location.latitude=" + str(latitude)
        url_lon = "&location.longitude=" + str(longitude)

        url_format = "&format=" + outputformat

        ### period (dates must be URL-quoted, with an explicit UTC suffix)
        startdate_str = quote(conv.dt2str(startdate) + " UTC")
        enddate_str = quote(conv.dt2str(enddate) + " UTC")
        url_cal = ("&calendar=proleptic_gregorian"
                   + "&fromdate=" + startdate_str
                   + "&todate=" + enddate_str)

        ### variables
        url_var = "&variable=" + formatvariables

        ### concatenation
        url_out = "".join((url_base, url_entry, url_lat, url_lon,
                           url_format, url_cal, url_var))

        print("INFO: downloaded URL")
        print(url_out)

        output_file = "_".join((outfile_prefix, icomp, CM_CF,
                                str(latitude), str(longitude),
                                conv.dt2str(startdate, "%Y%m%d_%H%M%S"),
                                conv.dt2str(enddate, "%Y%m%d_%H%M%S")))

        output_path = os.path.join(output_dir, output_file)

        print("INFO: output file")
        print(output_path)

        # urlretrieve's return value was bound but never used; call directly.
        urllib.request.urlretrieve(url_out, output_path)

    return output_path