def rinex_start_end(input_rinex_path,
                    interval_out=False,
                    add_tzinfo=False,
                    verbose=True,
                    safety_mode=True):
    """
    Return the first and the last epoch of a RINEX file,
    based on the epochs actually read at the head and the tail of the file.

    Parameters
    ----------
    input_rinex_path : str
        path of the RINEX file.
    interval_out : bool, optional
        also return the most common epoch interval (in seconds).
        The default is False.
    add_tzinfo : bool, optional
        attach a UTC tzinfo to the returned datetimes.
        The default is False.
    verbose : bool, optional
        print the found epochs. The default is True.
    safety_mode : bool, optional
        if the epoch reading fails (e.g. in case of a compressed RINEX)
        activate a reading of the header and the file name as backup.
        NOTE(review): currently unused here — the filename-based fallback
        below is always active when no epoch could be read; confirm the
        intended gating. The default is True.

    Returns
    -------
    first_epoch, last_epoch [, interval]
        first and last epochs as datetimes, plus the interval (seconds)
        if interval_out is True.
    """
    # Read a sample of epochs at the beginning and at the end of the file,
    # then take the min and the max of that sample.
    Head = utils.head(input_rinex_path, 1500)
    epochs_list_head = rinex_read_epoch(Head,
                                        interval_out=interval_out,
                                        add_tzinfo=add_tzinfo,
                                        out_array=False)

    Tail = utils.tail(input_rinex_path, 1500)
    epochs_list_tail = rinex_read_epoch(Tail,
                                        interval_out=interval_out,
                                        add_tzinfo=add_tzinfo,
                                        out_array=False)

    epochs_list = epochs_list_head + epochs_list_tail

    if len(epochs_list) == 0:
        # Fallback: no epoch could be decoded (e.g. compressed RINEX),
        # derive the session span from the file name instead.
        first_epoch = conv.rinexname2dt(input_rinex_path)
        alphabet = list(string.ascii_lowercase)

        # A lowercase session letter at position 7 of the short name
        # indicates an hourly file; otherwise assume a daily file.
        if os.path.basename(input_rinex_path)[7] in alphabet:
            last_epoch = first_epoch + dt.timedelta(hours=1)
        else:
            last_epoch = first_epoch + dt.timedelta(hours=24, seconds=-1)
    else:
        first_epoch = np.min(epochs_list)
        last_epoch = np.max(epochs_list)

    if add_tzinfo:
        first_epoch = first_epoch.replace(tzinfo=dateutil.tz.tzutc())
        last_epoch = last_epoch.replace(tzinfo=dateutil.tz.tzutc())

    if verbose:
        print("first & last epochs : ", first_epoch, last_epoch)
    if not interval_out:
        return first_epoch, last_epoch
    else:
        interv_lis = np.diff(epochs_list)
        interv_lis = [e.seconds + e.microseconds * 10**-6 for e in interv_lis]
        interval = utils.most_common(interv_lis)
        if verbose:  # was printed unconditionally; now honors the verbose flag
            print("interval : ", interval, last_epoch)

        return first_epoch, last_epoch, interval
def stations_in_EPOS_sta_coords_file_mono(coords_file_path):
    """
    Gives stations in a EPOS coords. file (YYYY_DDD_sta_coordinates)

    Parameters
    ----------
    coords_file_path : str
        path of the EPOS coords. file.

    Returns
    -------
    epoch : datetime
        the main mean epoch in the EPOS coords. file.
    stats_list : list
        list of 4 char station list.
    """

    site_lines = utils.grep(coords_file_path, " SITE            m")

    stats_list = []
    mean_mjd_list = []
    for site_line in site_lines:
        fields = site_line.split()
        stats_list.append(fields[8].lower())
        # mean of the validity period bounds (MJD start / MJD end)
        mean_mjd_list.append(np.mean([float(fields[6]), float(fields[7])]))

    mjd_final = utils.most_common(mean_mjd_list)
    epoch = conv.MJD2dt(mjd_final)
    return epoch, stats_list
def stations_in_sinex_mono(sinex_path):
    """
    Gives stations list in a SINEX file

    Parameters
    ----------
    sinex_path : str
         path of the SINEX file.

    Returns
    -------
    epoch : datetime
        the main mean epoch in the SINEX.
    stats_list : list
        list of 4 char station list.

    """

    def _data_lines(block_txt):
        # Keep only the data records of a SINEX block: non-empty lines
        # starting with a blank (header/comment lines start with +, - or *).
        # NB: after split('\n') no element can contain '\n', so the former
        # extra check (e != '\n') was redundant and has been dropped.
        return [e for e in block_txt.split('\n') if e != '' and e[0] == ' ']

    extract = utils.extract_text_between_elements(sinex_path, '+SITE/ID',
                                                  '-SITE/ID')
    stats_list = [e.split()[0].lower() for e in _data_lines(extract)]

    extract = utils.extract_text_between_elements(sinex_path,
                                                  '+SOLUTION/EPOCHS',
                                                  '-SOLUTION/EPOCHS')
    # the most common mean epoch (last field of each record) is kept
    epoch = conv.datestr_sinex_2_dt(
        utils.most_common([e.split()[-1] for e in _data_lines(extract)]))

    return epoch, stats_list
# Beispiel #4
# 0
def write_sp3(SP3_DF_in, outpath, skip_null_epoch=True, force_format_c=False):
    """
    Write a SP3 file from an Orbit DataFrame

    Parameters
    ----------
    SP3_DF_in : DataFrame
        Input Orbit DataFrame. Must contain at least the columns
        epoch, sat, const, x, y, z, clk.
    outpath : str
        The output path of the SP3 file.
    skip_null_epoch : bool, optional
        Do not write an epoch if all sats are null (filtering).
        The default is True.
    force_format_c : bool, optional
        Force a 5-line satellite header block. The default is False.

    Returns
    -------
    FinalStr : str
        The string written in the output file.
    """
    ################## MAIN DATA
    LinesStk = []

    SP3_DF_wrk = SP3_DF_in.sort_values(["epoch", "sat"])

    EpochRawList = SP3_DF_wrk["epoch"].unique()
    SatList = sorted(SP3_DF_wrk["sat"].unique())
    SatList = list(reversed(SatList))
    SatListSet = set(SatList)
    EpochUsedList = []

    for epoc in EpochRawList:
        SP3epoc = pd.DataFrame(SP3_DF_wrk[SP3_DF_wrk["epoch"] == epoc])
        ## add dummy records for the satellites missing at this epoch
        MissingSats = SatListSet.difference(set(SP3epoc["sat"]))

        for miss_sat in MissingSats:
            miss_line = SP3epoc.iloc[0].copy()
            miss_line["sat"] = miss_sat
            miss_line["const"] = miss_sat[0]
            miss_line["x"] = 0.000000
            miss_line["y"] = 0.000000
            miss_line["z"] = 0.000000
            miss_line["clk"] = 999999.999999

            # DataFrame.append was removed in pandas 2.0, use concat instead
            SP3epoc = pd.concat([SP3epoc, miss_line.to_frame().T])

        SP3epoc.sort_values("sat", inplace=True, ascending=False)
        timestamp = conv.dt2sp3_timestamp(conv.numpy_datetime2dt(epoc)) + "\n"

        linefmt = "P{:}{:14.6f}{:14.6f}{:14.6f}{:14.6f}\n"

        LinesStkEpoch = []
        sum_val_epoch = 0  # sums |x|+|y|+|z| proxy to detect an all-null epoch
        for ilin, lin in SP3epoc.iterrows():
            line_out = linefmt.format(lin["sat"], lin["x"], lin["y"], lin["z"],
                                      lin["clk"])

            sum_val_epoch += lin["x"] + lin["y"] + lin["z"]

            LinesStkEpoch.append(line_out)

        ### if skip_null_epoch activated, print only if valid epoch
        if not (np.isclose(sum_val_epoch, 0) and skip_null_epoch):
            LinesStk.append(timestamp)  # stack the timestamp
            LinesStk = LinesStk + LinesStkEpoch  # stack the values
            EpochUsedList.append(epoc)  # stack the epoc as dt

    ################## HEADER
    ######### SATELLITE LIST

    Satline_stk = []
    Sigmaline_stk = []

    # 17 satellites per header line; at least 5 lines (SP3-c minimum)
    if force_format_c:
        nlines = 5
    else:
        div, mod = np.divmod(len(SatList), 17)

        if div < 5:
            nlines = 5
        else:
            nlines = div

            if mod != 0:
                nlines += 1

    for i in range(nlines):
        SatLine = SatList[17 * i:17 * (i + 1)]
        SatLineSigma = len(SatLine) * " 01"

        # pad incomplete lines with dummy "00" satellites
        if len(SatLine) < 17:
            complem = " 00" * (17 - len(SatLine))
        else:
            complem = ""

        # the total satellite count appears only on the first "+" line
        if i == 0:
            nbsat4line = len(SatList)
        else:
            nbsat4line = ''

        satline = "+  {:3}   ".format(nbsat4line) + "".join(
            SatLine) + complem + "\n"
        sigmaline = "++       " + SatLineSigma + complem + "\n"

        Satline_stk.append(satline)
        Sigmaline_stk.append(sigmaline)

    ######### 2 First LINES
    start_dt = conv.numpy_datetime2dt(np.min(EpochUsedList))

    header_line1 = "#cP" + conv.dt2sp3_timestamp(
        start_dt, False) + "     {:3}".format(
            len(EpochUsedList)) + "   u+U IGSXX FIT  XXX\n"

    # epoch interval: numpy timedelta64 in ns -> seconds
    delta_epoch = int(utils.most_common(np.diff(EpochUsedList) * 10**-9))
    MJD = conv.dt2MJD(start_dt)
    MJD_int = int(np.floor(MJD))
    MJD_dec = MJD - MJD_int
    gps_wwww, gps_sec = conv.dt2gpstime(start_dt, False, "gps")

    header_line2 = "## {:4} {:15.8f} {:14.8f} {:5} {:15.13f}\n".format(
        gps_wwww, gps_sec, delta_epoch, MJD_int, MJD_dec)

    ######### HEADER BOTTOM
    header_bottom = """%c M  cc GPS ccc cccc cccc cccc cccc ccccc ccccc ccccc ccccc
%c cc cc ccc ccc cccc cccc cccc cccc ccccc ccccc ccccc ccccc
%f  1.2500000  1.025000000  0.00000000000  0.000000000000000
%f  0.0000000  0.000000000  0.00000000000  0.000000000000000
%i    0    0    0    0      0      0      0      0         0
%i    0    0    0    0      0      0      0      0         0
/* PCV:IGSXX_XXXX OL/AL:FESXXXX  NONE     YN CLK:CoN ORB:CoN
/*     GeodeZYX Toolbox Output
/*
/*
"""

    ################## FINAL STACK

    FinalLinesStk = []

    FinalLinesStk.append(header_line1)
    FinalLinesStk.append(header_line2)
    FinalLinesStk = FinalLinesStk + Satline_stk + Sigmaline_stk
    FinalLinesStk.append(header_bottom)
    FinalLinesStk = FinalLinesStk + LinesStk + ["EOF"]

    FinalStr = "".join(FinalLinesStk)

    # context manager guarantees the file is flushed and closed
    with open(outpath, "w+") as fout:
        fout.write(FinalStr)

    return FinalStr
def rinex_start_end(input_rinex_path,
                    interval_out=False,
                    add_tzinfo=False,
                    verbose=True,
                    safety_mode=True):
    """
    Return the first and the last epoch of a RINEX file
    (based on the actual content of the file, not the header)

    Can handle RINEX 2 and 3

    Parameters
    ----------
    input_rinex_path : str
        path of the rinex file.
        can be the path of a RINEX or directly
        the RINEX content as a string
    interval_out : bool, optional
        output also the intervals. The default is False.
    add_tzinfo : bool, optional
        add timezone information in the datetime's Epoches.
        The default is False.
    verbose : bool, optional
        verbose mode. The default is True.
    safety_mode : bool, optional
        if the epoch reading fails (e.g. in case of a compressed RINEX)
        activate a reading of the header and the file name as backup.
        The default is True.

    Returns
    -------
    first_epoch , last_epoch , [interval]
        First and last epochs, and the interval if asked.
    """
    # Sample epochs at both ends of the file; the extrema of this sample
    # give the first and last epochs.
    chunk_head = utils.head(input_rinex_path, 1500)
    epochs_head = rinex_read_epoch(chunk_head,
                                   interval_out=interval_out,
                                   add_tzinfo=add_tzinfo,
                                   out_array=False)

    chunk_tail = utils.tail(input_rinex_path, 1500)
    epochs_tail = rinex_read_epoch(chunk_tail,
                                   interval_out=interval_out,
                                   add_tzinfo=add_tzinfo,
                                   out_array=False)

    epochs_list = epochs_head + epochs_tail

    if epochs_list:
        first_epoch = np.min(epochs_list)
        last_epoch = np.max(epochs_list)
    else:
        # nothing decoded: fall back on the file name
        first_epoch = conv.rinexname2dt(input_rinex_path)
        session_char = os.path.basename(input_rinex_path)[7]

        if session_char in list(string.ascii_lowercase):
            last_epoch = first_epoch + dt.timedelta(hours=1)
        else:
            last_epoch = first_epoch + dt.timedelta(hours=24, seconds=-1)

    if add_tzinfo:
        utc = dateutil.tz.tzutc()
        first_epoch = first_epoch.replace(tzinfo=utc)
        last_epoch = last_epoch.replace(tzinfo=utc)

    if verbose:
        print("first & last epochs : ", first_epoch, last_epoch)

    if not interval_out:
        return first_epoch, last_epoch

    deltas = np.diff(epochs_list)
    interv_lis = [d.seconds + d.microseconds * 10**-6 for d in deltas]
    interval = utils.most_common(interv_lis)
    print("interval : ", interval, last_epoch)

    return first_epoch, last_epoch, interval
def write_epos_sta_coords(DF_in,
                          file_out,
                          sort_wrt="site",
                          no_time_limit_for_first_period=True,
                          no_time_limit_for_last_period=True,
                          soln_in_DF=True):
    """
    Write an EPOS coordinate file

    Parameters
    ----------
    DF_in : DataFrame
        Input Orbit DataFrame.
    file_out : str
        The output path of the file.
    sort_wrt : bool, optional
        Sort the values with respect to a DF column. 
        The default is "site".
    no_time_limit_for_first_period : bool, optional
        No time limit for the first period. 
        The default is True.
    no_time_limit_for_last_period : bool, optional
        No time limit for the last period. 
        The default is True.
    soln_in_DF : bool, optional
        Soln in DF. 
        The default is True.

    Returns
    -------
    final_str : str
        the string written in the output file.
    """

    DF_work = DF_in.sort_values([sort_wrt, "MJD_start"])

    Stat_lines_blk_stk = []

    generic_header = """+info
 FLATTENING                  298.2550
 MAJOR_AXIS              6378140.0000
 REFERENCE_FRAME                IGS14
 NUMBER_OF_STATIONS             {:5d}
 REF_MJD                        {:5d}
-info
"""

    generic_header = generic_header.format(
        len(DF_work["site_num"].unique()),
        int(utils.most_common(DF_work["MJD_ref"])))

    Stat_lines_blk_stk.append(generic_header)

    Stat_lines_blk_stk.append("+station_coordinates")

    for site in DF_work[sort_wrt].unique():

        Stat_lines_blk_stk.append(
            "*------------------------- ---- ----- -beg- -end- -**- ------------------------------------------------\n*"
        )

        # reassign instead of reset_index(inplace=True): the block is a
        # boolean-mask slice of DF_work, and mutating a slice in place
        # triggers pandas' SettingWithCopyWarning
        DF_SiteBlock = DF_work[DF_work[sort_wrt] == site].reset_index()

        for i_l, (_, l) in enumerate(DF_SiteBlock.iterrows()):

            # solution id and point code: taken from the DF if available,
            # otherwise generated from the row rank
            if soln_in_DF:
                iope = int(l["soln"])
                pt = l["pt"]
            else:
                iope = i_l + 1
                pt = "A"

            # MJD 0 = open-ended validity period in the EPOS convention
            if no_time_limit_for_first_period and i_l == 0:
                MJD_start = 0
            else:
                MJD_start = l["MJD_start"]

            if no_time_limit_for_last_period and (i_l +
                                                  1) == len(DF_SiteBlock):
                MJD_end = 0
            else:
                MJD_end = l["MJD_end"]

            line_site_fmt = " SITE            m {:4d}  {:1d} {:} {:5d} {:5d} {:5d} {:}   {:}  {:1d}      LOG_CAR       LOG_CAR"
            line_valu_fmt = " POS_VEL:XYZ     m {:4d}  {:1d} {:+15.4f} {:+15.4f} {:+15.4f}      {:+6.4f} {:+6.4f} {:+6.4f}"
            line_sigm_fmt = " SIG_PV_XYZ      m {:4d}  {:1d} {:+15.4f} {:+15.4f} {:+15.4f}      {:+6.4f} {:+6.4f} {:+6.4f}"

            line_site = line_site_fmt.format(int(l["site_num"]), int(iope),
                                             l["tecto_plate"].upper(),
                                             int(l["MJD_ref"]), int(MJD_start),
                                             int(MJD_end), l["site"], pt,
                                             int(iope))

            line_valu = line_valu_fmt.format(int(l["site_num"]), int(iope),
                                             l["x"], l["y"], l["z"], l["Vx"],
                                             l["Vy"], l["Vz"])

            line_sigm = line_sigm_fmt.format(int(l["site_num"]), int(iope),
                                             l["sx"], l["sy"], l["sz"],
                                             l["sVx"], l["sVy"], l["sVz"])

            Stat_lines_blk_stk.append(line_site)
            Stat_lines_blk_stk.append(line_valu)
            Stat_lines_blk_stk.append(line_sigm)
            Stat_lines_blk_stk.append("*")

    Stat_lines_blk_stk.append("-station_coordinates")

    final_str = "\n".join(Stat_lines_blk_stk)

    with open(file_out, "w+") as f:
        f.write(final_str)

    return final_str
def write_sp3(SP3_DF_in,
              outpath,
              outname=None,
              prefix='orb',
              skip_null_epoch=True,
              force_format_c=False):
    """
    Write a SP3 file from an Orbit DataFrame

    Parameters
    ----------
    SP3_DF_in : DataFrame
        Input Orbit DataFrame.
    outpath : str
        The output path of the file (see also outname).
    outname : None or str, optional
        None = outpath is the full path (directory + filename) of the output.
        A string = a manual name for the file.
        'auto_old_cnv' = automatically generate the filename (old convention)
        'auto_new_cnv' = automatically generate the filename (new convention)
        The default is None.
    prefix : str, optional
        the output 3-char. name of the AC. The default is 'orb'.
    skip_null_epoch : bool, optional
        Do not write an epoch if all sats are null (filtering). 
        The default is True.
    force_format_c : bool, optional
        Force a 5-line satellite header block. The default is False.

    Returns
    -------
    FinalStr : str
        The string containing the formatted SP3 data.
    """

    ################## MAIN DATA
    LinesStk = []

    SP3_DF_wrk = SP3_DF_in.sort_values(["epoch", "sat"])

    EpochRawList = SP3_DF_wrk["epoch"].unique()
    SatList = sorted(SP3_DF_wrk["sat"].unique())
    SatList = list(reversed(SatList))
    SatListSet = set(SatList)
    EpochUsedList = []

    # default dummy clock when the input provides none
    if not "clk" in SP3_DF_wrk.columns:
        SP3_DF_wrk["clk"] = 999999.999999

    for epoc in EpochRawList:
        SP3epoc = pd.DataFrame(SP3_DF_wrk[SP3_DF_wrk["epoch"] == epoc])
        ## manage missing Sats for the current epoc
        MissingSats = SatListSet.difference(set(SP3epoc["sat"]))

        for miss_sat in MissingSats:
            miss_line = SP3epoc.iloc[0].copy()
            miss_line["sat"] = miss_sat
            miss_line["const"] = miss_sat[0]
            miss_line["x"] = 0.000000
            miss_line["y"] = 0.000000
            miss_line["z"] = 0.000000
            miss_line["clk"] = 999999.999999

            # DataFrame.append was removed in pandas 2.0, use concat instead
            SP3epoc = pd.concat([SP3epoc, miss_line.to_frame().T])
        #### end of missing sat bloc

        SP3epoc.sort_values("sat", inplace=True, ascending=False)
        timestamp = conv.dt2sp3_timestamp(conv.numpy_dt2dt(epoc)) + "\n"

        linefmt = "P{:}{:14.6f}{:14.6f}{:14.6f}{:14.6f}\n"

        LinesStkEpoch = []
        sum_val_epoch = 0  # proxy used to detect an all-null epoch
        for ilin, lin in SP3epoc.iterrows():
            if not "clk" in lin.index:  # manage case if no clk in columns
                lin["clk"] = 999999.999999
            line_out = linefmt.format(lin["sat"], lin["x"], lin["y"], lin["z"],
                                      lin["clk"])

            sum_val_epoch += lin["x"] + lin["y"] + lin["z"]

            LinesStkEpoch.append(line_out)

        ### if skip_null_epoch activated, print only if valid epoch
        if not (np.isclose(sum_val_epoch, 0) and skip_null_epoch):
            LinesStk.append(timestamp)  # stack the timestamp
            LinesStk = LinesStk + LinesStkEpoch  # stack the values
            EpochUsedList.append(epoc)  # stack the epoc as dt

    ################## HEADER
    ######### SATELLITE LIST

    Satline_stk = []
    Sigmaline_stk = []

    # 17 satellites per header line; at least 5 lines (SP3-c minimum)
    if force_format_c:
        nlines = 5
    else:
        div, mod = np.divmod(len(SatList), 17)

        if div < 5:
            nlines = 5
        else:
            nlines = div

            if mod != 0:
                nlines += 1

    for i in range(nlines):
        SatLine = SatList[17 * i:17 * (i + 1)]
        SatLineSigma = len(SatLine) * " 01"

        # pad incomplete lines with dummy "00" satellites
        if len(SatLine) < 17:
            complem = " 00" * (17 - len(SatLine))
        else:
            complem = ""

        # the total satellite count appears only on the first "+" line
        if i == 0:
            nbsat4line = len(SatList)
        else:
            nbsat4line = ''

        satline = "+  {:3}   ".format(nbsat4line) + "".join(
            SatLine) + complem + "\n"
        sigmaline = "++       " + SatLineSigma + complem + "\n"

        Satline_stk.append(satline)
        Sigmaline_stk.append(sigmaline)

    ######### 2 First LINES
    start_dt = conv.numpy_dt2dt(np.min(EpochUsedList))

    header_line1 = "#cP" + conv.dt2sp3_timestamp(
        start_dt, False) + "     {:3}".format(
            len(EpochUsedList)) + "   u+U IGSXX FIT  XXX\n"

    # epoch interval: numpy timedelta64 in ns -> seconds
    delta_epoch = int(utils.most_common(np.diff(EpochUsedList) * 10**-9))
    MJD = conv.dt2MJD(start_dt)
    MJD_int = int(np.floor(MJD))
    MJD_dec = MJD - MJD_int
    gps_wwww, gps_sec = conv.dt2gpstime(start_dt, False, "gps")

    header_line2 = "## {:4} {:15.8f} {:14.8f} {:5} {:15.13f}\n".format(
        gps_wwww, gps_sec, delta_epoch, MJD_int, MJD_dec)

    ######### HEADER BOTTOM
    header_bottom = """%c M  cc GPS ccc cccc cccc cccc cccc ccccc ccccc ccccc ccccc
%c cc cc ccc ccc cccc cccc cccc cccc ccccc ccccc ccccc ccccc
%f  1.2500000  1.025000000  0.00000000000  0.000000000000000
%f  0.0000000  0.000000000  0.00000000000  0.000000000000000
%i    0    0    0    0      0      0      0      0         0
%i    0    0    0    0      0      0      0      0         0
/* PCV:IGSXX_XXXX OL/AL:FESXXXX  NONE     YN CLK:CoN ORB:CoN
/*     GeodeZYX Toolbox Output
/*
/*
"""

    ################## FINAL STACK

    FinalLinesStk = []

    FinalLinesStk.append(header_line1)
    FinalLinesStk.append(header_line2)
    FinalLinesStk = FinalLinesStk + Satline_stk + Sigmaline_stk
    FinalLinesStk.append(header_bottom)
    FinalLinesStk = FinalLinesStk + LinesStk + ["EOF"]

    FinalStr = "".join(FinalLinesStk)

    ### Manage the file path
    prefix_opera = prefix

    if not outname:
        outpath_opera = outpath
    elif outname == 'auto_old_cnv':
        week, dow = conv.dt2gpstime(start_dt)
        filename = prefix_opera + str(week) + str(dow) + '.sp3'
        outpath_opera = os.path.join(outpath, filename)
    elif outname == 'auto_new_cnv':
        print("ERR: not implemented yet !!!!!")
        raise NotImplementedError
    else:
        # manual filename (documented case, previously unhandled:
        # outpath_opera was left unbound -> UnboundLocalError)
        outpath_opera = os.path.join(outpath, outname)

    # context manager guarantees the file is flushed and closed
    with open(outpath_opera, "w+") as fout:
        fout.write(FinalStr)

    return FinalStr