Example #1
def run_track(temp_dir, exp_full_name, out_conf_fil, date, rnx_rover, out_dir):
    """Launch a track run from a prepared .cmd file and copy the resulting
    sum/pos/cmd files from temp_dir to out_dir."""
    # build the track command line: -f config file, -d day of year,
    # -w GPS week/day string built from conv.dt2gpstime
    dowstring = ''.join([str(e) for e in conv.dt2gpstime(date)])
    bigcomand = ' '.join(("track -f", out_conf_fil, '-d', conv.dt2doy(date), '-w', dowstring))

    print('INFO : command launched :')
    print(bigcomand)

    # START OF PROCESSING
    os.chdir(temp_dir)
    subprocess.call([bigcomand], executable='/bin/bash', shell=True)

    outfiles = []
    outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*sum*'))
    outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*pos*'))
    outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*cmd*'))

    Antobj_rov , Recobj_rov , Siteobj_rov , Locobj_rov = \
    files_rw.read_rinex_2_dataobjts(rnx_rover)

    # copy the results to the output directory, then clean them from temp_dir
    for e in outfiles:
        shutil.copy(e, out_dir)
        os.remove(e)

    print("TRACK RUN FINISHED")
    print('results available in ' , out_dir)

    return None
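# --- Usage sketch for run_track (illustrative only: paths are hypothetical and
# the conv / files_rw helpers plus os, glob, shutil, subprocess are assumed to
# be imported at module level) ---
#
# import datetime as dt
# run_track("/data/track_wd/TEMP", "TEST_rove_base_2021_001",
#           "/data/track_wd/OUTPUT/TEST_rove_base_2021_001.cmd",
#           dt.datetime(2021, 1, 1), "/data/track_wd/TEMP/rove0010.21o",
#           "/data/track_wd/OUTPUT")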
Example #2
def compar_orbit(Data_inp_1,
                 Data_inp_2,
                 step_data=900,
                 sats_used_list=['G'],
                 name1='',
                 name2='',
                 use_name_1_2_for_table_name=False,
                 RTNoutput=True,
                 convert_ECEF_ECI=True,
                 clean_null_values=True,
                 conv_coef=10**3,
                 return_satNull=False):
    """
    Compares two GNSS orbit files (SP3), and gives a summary plot and a
    statistics table

    Parameters
    ----------
    Data_inp_1 & Data_inp_2 : str or Pandas DataFrame
        contains the orbits, or the path (string) to the SP3 file

    step_data : int
        data sampling rate in seconds (default 900)

    sats_used_list : list of str
        constellations or satellites used : G E R C ... E01 , G02 ...
        Individual satellites take priority over whole constellations,
        e.g. ['G',"E04"]


    RTNoutput : bool
        select the output frame : Radial Transverse Normal or XYZ

    convert_ECEF_ECI : bool
        convert the SP3 from ECEF to ECI ; must be True in operational use!

    name1 & name2 : str (optional)
        optional custom names for the 2 orbits

    use_name_1_2_for_table_name : bool
        if True, use name1 and name2 for the table name ;
        if False, use the data file names instead

    clean_null_values : bool or str
        if True or "all", remove a sat position when all X, Y, Z values
        are null (0.000000)
        if "any", remove a sat position if X, Y or Z is null
        if False, keep everything

    conv_coef : int
        conversion coefficient : km to m is 10**3, km to mm is 10**6

    return_satNull : bool
        if True, also return the list of satellites containing null values

    Returns
    -------
    Diff_sat_all : Pandas DataFrame
        contains the differences between Data_inp_1 & Data_inp_2
        in the Radial Transverse Normal OR XYZ frame

        Attributes of Diff_sat_all :
            Diff_sat_all.name : title of the table

    Note
    ----
    clean_null_values is useful (and necessary) only if
    convert_ECEF_ECI = False.
    If convert_ECEF_ECI = True, the cleaning is done by a side effect :
    the ECEF => ECI conversion generates NaN for a zero-valued position.
    Nevertheless, activating clean_null_values = True is better.
    This note is in fact useful if you want to see bad positions on a plot :
    then set convert_ECEF_ECI = False and clean_null_values = False.

    Source
    ------
    "Coordinate Systems", ASEN 3200 1/24/06 George H. Born

    """

    # selection of both used Constellations AND satellites
    const_used_list = []
    sv_used_list = []
    for sat in sats_used_list:
        if len(sat) == 1:
            const_used_list.append(sat)
        elif len(sat) == 3:
            sv_used_list.append(sat)
            if not sat[0] in const_used_list:
                const_used_list.append(sat[0])

    # Read the files or DataFrames
    # metadata attributes are not copied
    # Thus, manual copy ...
    # (Dirty way, should be improved without so many lines ...)
    if type(Data_inp_1) is str:
        D1orig = files_rw.read_sp3(Data_inp_1, epoch_as_pd_index=True)
    else:
        D1orig = Data_inp_1.copy(True)
        try:
            D1orig.name = Data_inp_1.name
        except AttributeError:
            D1orig.name = "no_name"
        try:
            D1orig.path = Data_inp_1.path
        except AttributeError:
            D1orig.path = "no_path"
        try:
            D1orig.filename = Data_inp_1.filename
        except AttributeError:
            D1orig.filename = "no_filename"

    if type(Data_inp_2) is str:
        D2orig = files_rw.read_sp3(Data_inp_2, epoch_as_pd_index=True)
    else:
        D2orig = Data_inp_2.copy(True)
        try:
            D2orig.name = Data_inp_2.name
        except AttributeError:
            D2orig.name = "no_name"
        try:
            D2orig.path = Data_inp_2.path
        except AttributeError:
            D2orig.path = "no_path"
        try:
            D2orig.filename = Data_inp_2.filename
        except AttributeError:
            D2orig.filename = "no_filename"

    #### NB : It has been decided with GM that the index of a SP3 dataframe
    ####      will be integers, not epoch datetime anymore
    ####      BUT here, for legacy reasons, the index has to be datetime

    if isinstance(D1orig.index[0], (int, np.integer)):
        D1orig.set_index("epoch", inplace=True)

    if isinstance(D2orig.index[0], (int, np.integer)):
        D2orig.set_index("epoch", inplace=True)

    Diff_sat_stk = []

    # This block is for removing null values
    if clean_null_values:
        if clean_null_values == "all":
            all_or_any = np.all
        elif clean_null_values == "any":
            all_or_any = np.any
        else:
            all_or_any = np.all

        xyz_lst = ['x', 'y', 'z']

        D1_null_bool = all_or_any(np.isclose(D1orig[xyz_lst], 0.), axis=1)
        D2_null_bool = all_or_any(np.isclose(D2orig[xyz_lst], 0.), axis=1)

        D1 = D1orig[np.logical_not(D1_null_bool)]
        D2 = D2orig[np.logical_not(D2_null_bool)]

        if np.any(D1_null_bool) or np.any(D2_null_bool):
            sat_nul = utils.join_improved(
                " ", *list(set(D1orig[D1_null_bool]["sat"])))
            print("WARN : Null values contained in SP3 files : ")
            print(
                "f1:", np.sum(D1_null_bool),
                utils.join_improved(" ",
                                    *list(set(D1orig[D1_null_bool]["sat"]))))
            print(
                "f2:", np.sum(D2_null_bool),
                utils.join_improved(" ",
                                    *list(set(D2orig[D2_null_bool]["sat"]))))
        else:
            sat_nul = []

    else:
        sat_nul = []
        D1 = D1orig.copy()
        D2 = D2orig.copy()

    for constuse in const_used_list:
        D1const = D1[D1['const'] == constuse]
        D2const = D2[D2['const'] == constuse]

        # checking if the data correspond to the step
        bool_step1 = np.mod((D1const.index - np.min(D1.index)).seconds,
                            step_data) == 0
        bool_step2 = np.mod((D2const.index - np.min(D2.index)).seconds,
                            step_data) == 0

        D1window = D1const[bool_step1]
        D2window = D2const[bool_step2]

        # find common sats and common epochs
        sv_set = sorted(
            list(set(D1window['sv']).intersection(set(D2window['sv']))))
        epoc_set = sorted(
            list(set(D1window.index).intersection(set(D2window.index))))

        # if a specific selection of satellites is requested, apply it
        if any(constuse in e for e in sv_used_list):
            # first find the selected sats for this constellation
            sv_used_select_list = [
                int(e[1:]) for e in sv_used_list if constuse in e
            ]
            #and apply it
            sv_set = sorted(
                list(set(sv_set).intersection(set(sv_used_select_list))))

        for svv in sv_set:
            # First search : find the corresponding epochs for the SV.
            # This is sufficient if there are no gaps (e.g. with 0.00000), i.e.
            # the same number of obs in the 2 files.
            # NB : .reindex() is smart, it fills the DataFrame
            # with NaN
            try:
                D1sv_orig = D1window[D1window['sv'] == svv].reindex(epoc_set)
                D2sv_orig = D2window[D2window['sv'] == svv].reindex(epoc_set)
            except Exception as exce:
                print("ERR : Unable to re-index with a unique epoch")
                print(
                    "      are you sure there are no epochs defined several times for the same sat ?"
                )
                print(
                    "      it happens e.g. when multiple ACs are in the same DataFrame "
                )
                print(
                    "TIP : filter the input DataFrame before calling this fct, e.g. with"
                )
                print("      DF = DF[DF['AC'] == 'gbm']")
                raise exce

            # Second search : a safety net in case of gaps.
            # This step should be redundant, because .reindex() already
            # fills the DataFrame with NaN
            if len(D1sv_orig) != len(D2sv_orig):
                print("INFO : different epochs nbr for SV", svv,
                      len(D1sv_orig), len(D2sv_orig))
                epoc_sv_set = sorted(
                    list(
                        set(D1sv_orig.index).intersection(set(
                            D2sv_orig.index))))
                D1sv = D1sv_orig.loc[epoc_sv_set]
                D2sv = D2sv_orig.loc[epoc_sv_set]
            else:
                D1sv = D1sv_orig
                D2sv = D2sv_orig

            P1 = D1sv[['x', 'y', 'z']]
            P2 = D2sv[['x', 'y', 'z']]

            # Start ECEF => ECI
            if convert_ECEF_ECI:
                # Backup, because the xyz columns will be overwritten
                #D1sv_bkp = D1sv.copy()
                #D2sv_bkp = D2sv.copy()

                P1b = conv.ECEF2ECI(
                    np.array(P1),
                    conv.dt_gpstime2dt_utc(P1.index.to_pydatetime(),
                                           out_array=True))
                P2b = conv.ECEF2ECI(
                    np.array(P2),
                    conv.dt_gpstime2dt_utc(P2.index.to_pydatetime(),
                                           out_array=True))

                D1sv[['x', 'y', 'z']] = P1b
                D2sv[['x', 'y', 'z']] = P2b

                P1 = D1sv[['x', 'y', 'z']]
                P2 = D2sv[['x', 'y', 'z']]
            # End ECEF => ECI

            if not RTNoutput:
                # Compatible with the documentation +
                # empirically tested with OV software
                # it is  P1 - P2 (and not P2 - P1)
                Delta_P = P1 - P2

                Diff_sat = Delta_P.copy()
                Diff_sat.columns = ['dx', 'dy', 'dz']

            else:
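                # Build the RTN frame from the reference orbit D1sv :
                #   R : radial unit vector (normalized position)
                #   C : cross-track (normal) unit vector, normalized r x v
                #   I : along-track (transverse) unit vector, C x R,
                #       completing the right-handed triad
                # Delta_P is then projected onto (R, I, C) epoch by epoch.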
                rnorm = np.linalg.norm(P1, axis=1)

                Vx = utils.diff_pandas(D1sv, 'x')
                Vy = utils.diff_pandas(D1sv, 'y')
                Vz = utils.diff_pandas(D1sv, 'z')

                V = pd.concat((Vx, Vy, Vz), axis=1)
                V.columns = ['vx', 'vy', 'vz']

                R = P1.divide(rnorm, axis=0)
                R.columns = ['xnorm', 'ynorm', 'znorm']

                H = pd.DataFrame(np.cross(R, V), columns=['hx', 'hy', 'hz'])
                hnorm = np.linalg.norm(H, axis=1)

                C = H.divide(hnorm, axis=0)
                C.columns = ['hxnorm', 'hynorm', 'hznorm']

                I = pd.DataFrame(np.cross(C, R), columns=['ix', 'iy', 'iz'])

                R_ar = np.array(R)
                I_ar = np.array(I)
                C_ar = np.array(C)

                #R_ar[1]
                Beta = np.stack((R_ar, I_ar, C_ar), axis=1)

                # Compatible with the documentation +
                # empirically tested with OV software
                # it is  P1 - P2 (and not P2 - P1)
                Delta_P = P1 - P2

                # Final determination
                Astk = []

                for i in range(len(Delta_P)):
                    A = np.dot(Beta[i, :, :], np.array(Delta_P)[i])
                    Astk.append(A)

                Diff_sat = pd.DataFrame(np.vstack(Astk),
                                        index=P1.index,
                                        columns=['dr', 'dt', 'dn'])

            Diff_sat = Diff_sat * conv_coef  # conversion to meters (or mm, depending on conv_coef)

            Diff_sat['const'] = [constuse] * len(Diff_sat.index)
            Diff_sat['sv'] = [svv] * len(Diff_sat.index)
            Diff_sat['sat'] = [constuse + str(svv).zfill(2)] * len(
                Diff_sat.index)

            Diff_sat_stk.append(Diff_sat)

    Diff_sat_all = pd.concat(Diff_sat_stk)
    Date = Diff_sat.index[0]

    # Attribute definition
    if RTNoutput:
        Diff_sat_all.frame_type = 'RTN'

        # Pandas doesn't handle iterables well as attributes,
        # so they are stored separately
        Diff_sat_all.frame_col_name1 = 'dr'
        Diff_sat_all.frame_col_name2 = 'dt'
        Diff_sat_all.frame_col_name3 = 'dn'

    else:
        # Pandas doesn't handle iterables well as attributes,
        # so they are stored separately
        Diff_sat_all.frame_col_name1 = 'dx'
        Diff_sat_all.frame_col_name2 = 'dy'
        Diff_sat_all.frame_col_name3 = 'dz'

        if convert_ECEF_ECI:
            Diff_sat_all.frame_type = 'ECI'
        else:
            Diff_sat_all.frame_type = 'ECEF'

    # Name definitions
    if name1:
        Diff_sat_all.name1 = name1
    else:
        Diff_sat_all.name1 = D1orig.name

    if name2:
        Diff_sat_all.name2 = name2
    else:
        Diff_sat_all.name2 = D2orig.name

    Diff_sat_all.filename1 = D1orig.filename
    Diff_sat_all.filename2 = D2orig.filename

    Diff_sat_all.path1 = D1orig.path
    Diff_sat_all.path2 = D2orig.path

    Diff_sat_all.name = ' '.join(
        ('Orbits comparison (' + Diff_sat_all.frame_type + ') b/w',
         Diff_sat_all.name1, '(ref.) and', Diff_sat_all.name2, ',',
         Date.strftime("%Y-%m-%d"), ', doy', str(conv.dt2doy(Date))))

    if return_satNull:
        return Diff_sat_all, sat_nul
    else:
        return Diff_sat_all
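# --- Usage sketch for compar_orbit (illustrative only : SP3 paths are
# hypothetical and conv, utils, files_rw, np, pd are assumed to be imported
# at module level) ---
#
# Diff = compar_orbit("/data/sp3/gbm20000.sp3", "/data/sp3/igs20000.sp3",
#                     sats_used_list=['G'], RTNoutput=True)
# print(Diff.name)
# print(Diff.groupby('sat')[['dr', 'dt', 'dn']].std())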
Example #3
def track_runner(rnx_rover,rnx_base,working_dir,experience_prefix,
                 XYZbase  = [], XYZrover = [] , outtype = 'XYZ',mode = 'short',
                 interval=None,antmodfile = "~/gg/tables/antmod.dat",
                 calc_center='igs' , forced_sp3_path = '',
                 const="G",silent=False,rinex_full_path=False,
                 run_on_gfz_cluster=False,forced_iono_path=''):
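    """
    Run a track differential processing between a rover and a base RINEX file :
    the RINEX are copied (and uncompressed if needed) to a temporary dir,
    a track .cmd configuration file is written, sp3 orbits are downloaded
    (or taken from forced_sp3_path), and the track command is launched
    (unless silent=True). Returns the launched command string.
    """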

    # paths & files
    working_dir = utils.create_dir(working_dir)
    temp_dir    = utils.create_dir(os.path.join(working_dir,'TEMP'))
    out_dir     = utils.create_dir(os.path.join(working_dir,'OUTPUT'))

    if operational.check_if_compressed_rinex(rnx_rover):
        rnx_rover = operational.crz2rnx(rnx_rover,temp_dir)
    else:
        shutil.copy(rnx_rover,temp_dir)

    if operational.check_if_compressed_rinex(rnx_base):
        rnx_base  = operational.crz2rnx(rnx_base,temp_dir)
    else:
        shutil.copy(rnx_base,temp_dir)

    # RINEX START & END
    rov_srt, rov_end , rov_itv = operational.rinex_start_end(rnx_rover,1)
    bas_srt, bas_end , bas_itv = operational.rinex_start_end(rnx_base,1)

    # RINEX NAMES
    rov_name = os.path.basename(rnx_rover)[0:4]
    bas_name = os.path.basename(rnx_base)[0:4]

    rov_name_uper = rov_name.upper()
    bas_name_uper = bas_name.upper()


    srt_str = rov_srt.strftime("%Y_%j")
    exp_full_name = '_'.join((experience_prefix,rov_name,bas_name,srt_str))

    out_conf_fil   = os.path.join(out_dir,exp_full_name + '.cmd')
    out_result_fil = os.path.join(out_dir,exp_full_name + '.out' )

    print(out_conf_fil)

    confobj = open(out_conf_fil,'w+')
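    # The track .cmd file is written section by section below :
    # obs_file (base + rover RINEX), nav_file (sp3 orbits), optional
    # ionex_file, mode, output roots (pos/res/sum), out_type, interval,
    # optional site_pos a prioris, ante_off antenna offsets, site_stats
    # constraints, TR_GNSS constellation selection, and misc settings.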


    # Obs Files
    confobj.write(' obs_file' + '\n')
    ### just the basename, the number of characters is limited  (20210415)
    if not rinex_full_path:
        confobj.write(' '.join((' ',bas_name_uper,os.path.basename(rnx_base) ,'F'))+ '\n')
        confobj.write(' '.join((' ',rov_name_uper,os.path.basename(rnx_rover),'K'))+ '\n')
    else:
        confobj.write(' '.join((' ',bas_name_uper,rnx_base ,'F'))+ '\n')
        confobj.write(' '.join((' ',rov_name_uper,rnx_rover,'K'))+ '\n')
    confobj.write('\n')

    date = conv.rinexname2dt(os.path.basename(rnx_rover))

    # Nav File
    if forced_sp3_path == '':
        strt_rnd = dt.datetime(*bas_srt.timetuple()[:3])
        end_rnd  = dt.datetime(*bas_end.timetuple()[:3])
                
        orblis = operational.multi_downloader_orbs_clks( temp_dir ,
                                                        strt_rnd , end_rnd ,
                                                        archtype='/',
                                                        calc_center = calc_center)
        
        #sp3Z = orblis[0]
        sp3 = [utils.uncompress(sp3Z) for sp3Z in orblis]
        sp3 = [e  if ".sp3" in e[-5:] else e + ".sp3" for e in sp3]
    else:
        if utils.is_iterable(forced_sp3_path):
            sp3 = forced_sp3_path
        else:
            sp3 = [forced_sp3_path]
    for sp3_mono in sp3: 
        confobj.write(' '.join((' ','nav_file',sp3_mono ,' sp3'))+ '\n')
    confobj.write('\n')

    # Iono file
   
    if forced_iono_path != '':
        confobj.write(' ionex_file ' +  forced_iono_path  + '\n' )
    

    # Mode
    confobj.write(' mode ' +  mode + '\n')
    confobj.write('\n')

    # Output
    confobj.write(' pos_root ' + exp_full_name +'.pos' + '\n' )
    confobj.write(' res_root ' + exp_full_name +'.res' + '\n' )
    confobj.write(' sum_file ' + exp_full_name +'.sum' + '\n' )
    confobj.write('\n')

    # Outtype
    confobj.write(' out_type ' + outtype + '\n')
    confobj.write('\n')

    # Interval
    if not interval:
        confobj.write(' interval ' + str(rov_itv) + '\n')
    else:
        confobj.write(' interval ' + str(interval) + '\n')

    confobj.write('\n')

    # Coords
    bool_site_pos = False
    if XYZbase != []:
        if not bool_site_pos:
            confobj.write(' site_pos \n')
            bool_site_pos = True
        XYZbase = [str(e) for e in XYZbase]
        confobj.write(' '.join([' ', bas_name_uper] + XYZbase + ['\n']))

    if XYZrover != []:
        if not bool_site_pos:
            confobj.write(' site_pos \n')
            bool_site_pos = True
        XYZrover = [str(e) for e in XYZrover]
        confobj.write(' '.join([' ', rov_name_uper] + XYZrover + ['\n']))

    if bool_site_pos:
        confobj.write('\n')

    # Offsets
    confobj.write(' ante_off \n')

    Antobj_rov , Recobj_rov , Siteobj_rov , Locobj_rov = \
    files_rw.read_rinex_2_dataobjts(rnx_rover)

    confobj.write(' '.join([' ', rov_name_uper ,
                            str(Antobj_rov.North_Ecc) ,
                            str(Antobj_rov.East_Ecc) ,
                            str(Antobj_rov.Up_Ecc) ,
                            Antobj_rov.Antenna_Type , '\n']))

    Antobj_bas , Recobj_bas , Siteobj_bas , Locobj_bas = \
    files_rw.read_rinex_2_dataobjts(rnx_base)

    confobj.write(' '.join([' ', bas_name_uper ,
                            str(Antobj_bas.North_Ecc) ,
                            str(Antobj_bas.East_Ecc) ,
                            str(Antobj_bas.Up_Ecc) ,
                            Antobj_bas.Antenna_Type , '\n']))
    confobj.write('\n')

    # Site_stats
    confobj.write(' site_stats \n')
    confobj.write(' ' + bas_name_uper  + " 0.1 0.1 0.1 0 0 0" + '\n')
    confobj.write(' ' + rov_name_uper  + " 20 20 20 0.5 0.5 0.5" + '\n')
    confobj.write('\n')

    # constellations
    confobj.write(" TR_GNSS " + const + '\n')


    # Misc
    #confobj.write(" USE_GPTGMF"   + '\n')
    confobj.write(" ATM_MODELC GMF 0.5"   + '\n')
    confobj.write(" ANTMOD_FILE " + antmodfile + '\n')
    confobj.write(" DCB_FILE "    + "~/gg/incremental_updates/tables/dcb.dat.gnss" + '\n')


    confobj.write(" atm_stats" + '\n')
    confobj.write('  all 0.1 0.0003 0.00023' + '\n')


    confobj.close()
    #END OF FILE WRITING

    dowstring = ''.join([str(e) for e in conv.dt2gpstime(date)])
    bigcomand = ' '.join(("track -f" ,  out_conf_fil , '-d' , conv.dt2doy(date) ,'-w', dowstring))

    if run_on_gfz_cluster:
        bigcomand = "cjob -c '" + bigcomand + "'"
        executable="/bin/csh"
    else:
        executable="/bin/bash"

    print('INFO : command launched :')
    print(bigcomand)


    # START OF PROCESSING
    if not silent:
        os.chdir(temp_dir)
        try:
            subprocess.call([bigcomand], executable=executable, shell=True,timeout=60*20)
        except subprocess.TimeoutExpired:
            print("WARN: command timeout expired, skip")
            pass
    
        outfiles = []
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*sum*'))
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*pos*'))
        outfiles = outfiles + glob.glob(os.path.join(temp_dir,exp_full_name + '*cmd*'))
    
        Antobj_rov , Recobj_rov , Siteobj_rov , Locobj_rov = \
        files_rw.read_rinex_2_dataobjts(rnx_rover)
    
        # copy the results to the output directory, then clean them from temp_dir
        for e in outfiles:
            shutil.copy(e, out_dir)
            os.remove(e)
    
        print("TRACK RUN FINISHED")
        print('results available in ' , out_dir)
    else:
        print("Silent mode ON: nothing is launched")
        
    return bigcomand
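# --- Usage sketch for track_runner (illustrative only : paths are hypothetical
# and the utils, conv, files_rw, operational helpers plus the standard imports
# are assumed at module level) ---
#
# cmd = track_runner("/data/rinex/rove0010.21d.Z", "/data/rinex/base0010.21d.Z",
#                    "/data/track_wd", "TEST",
#                    mode="short", const="G", silent=True)
# print(cmd)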
Example #4
def gpt3(dtin, lat, lon, h_ell, C, it=0):
    """
    This subroutine determines pressure, temperature, temperature lapse rate,
    mean temperature of the water vapor, water vapour pressure, hydrostatic
    and wet mapping function coefficients ah and aw, water vapour decrease
    factor, geoid undulation and empirical tropospheric gradients for
    specific sites near the earth's surface.
    It is based on a 1 x 1 degree external grid file ('gpt3_1.grd') with mean
    values as well as sine and cosine amplitudes for the annual and
    semiannual variation of the coefficients.

    Parameters:
    ----------
    dtin :
        date as a Python datetime object

    lat:
        ellipsoidal latitude in radians [-pi/2:+pi/2]

    lon:
        longitude in radians [-pi:pi] or [0:2pi]

    h_ell:
        ellipsoidal height in m

    C:
        content of the GPT3 grid file, as a numeric array (one row per grid point)

    it:
        1: no time variation, i.e. static quantities;
        0: with time variation (annual and semiannual terms)
    Returns:
    ----------
    p:
        pressure in hPa

    T:
        temperature in degrees Celsius

    dT:
        temperature lapse rate in degrees per km

    Tm:
        mean temperature weighted with the water vapor in degrees Kelvin

    e:
        water vapour pressure in hPa

    ah:
        hydrostatic mapping function coefficient at zero height (VMF3)

    aw:
        wet mapping function coefficient (VMF3)

    la:
        water vapour decrease factor

    undu:
        geoid undulation in m

    Gn_h:
        hydrostatic north gradient in m

    Ge_h:
        hydrostatic east gradient in m

    Gn_w:
        wet north gradient in m

    Ge_w:
        wet east gradient in m

    Notes
    ----------
        Modified for Python by Chaiyaporn Kitpracha

    Source
    ----------
        (c) Department of Geodesy and Geoinformation, Vienna University of
        Technology, 2017

        The copyright in this document is vested in the Department of Geodesy and
        Geoinformation (GEO), Vienna University of Technology, Austria. This document
        may only be reproduced in whole or in part, or stored in a retrieval
        system, or transmitted in any form, or by any means electronic,
        mechanical, photocopying or otherwise, either with the prior permission
        of GEO or in accordance with the terms of ESTEC Contract No.
        4000107329/12/NL/LvH.

        D. Landskron, J. Böhm (2018), VMF3/GPT3: Refined Discrete and Empirical Troposphere Mapping Functions,
        J Geod (2018) 92: 349., doi: 10.1007/s00190-017-1066-2.
        Download at: https://link.springer.com/content/pdf/10.1007%2Fs00190-017-1066-2.pdf
    """
    lat = np.array(lat)
    lon = np.array(lon)
    h_ell = np.array(h_ell)
    # Extract data from grid
    p_grid = C[:, 2:7]  # pressure in Pascal
    T_grid = C[:, 7:12]  # temperature in Kelvin
    Q_grid = C[:, 12:17] / 1000  # specific humidity in kg/kg
    dT_grid = C[:, 17:22] / 1000  # temperature lapse rate in Kelvin/m
    u_grid = C[:, 22]  # geoid undulation in m
    Hs_grid = C[:, 23]  # orthometric grid height in m
    ah_grid = C[:, 24:29] / 1000  # hydrostatic mapping function coefficient, dimensionless
    aw_grid = C[:, 29:34] / 1000  # wet mapping function coefficient, dimensionless
    la_grid = C[:, 34:39]  # water vapor decrease factor, dimensionless
    Tm_grid = C[:, 39:44]  # mean temperature in Kelvin
    Gn_h_grid = C[:, 44:49] / 100000  # hydrostatic north gradient in m
    Ge_h_grid = C[:, 49:54] / 100000  # hydrostatic east gradient in m
    Gn_w_grid = C[:, 54:59] / 100000  # wet north gradient in m
    Ge_w_grid = C[:, 59:64] / 100000  # wet east gradient in m

    # Convert from datetime to doy
    doy = float(conv.dt2doy(dtin)) + conv.dt2fracday(dtin)

    # determine the GPT3 coefficients

    # mean gravity in m/s**2
    gm = 9.80665
    # molar mass of dry air in kg/mol
    dMtr = 28.965e-3
    # universal gas constant in J/K/mol
    Rg = 8.3143

    # factors for amplitudes
    if it == 1:  # then  constant parameters
        cosfy = 0
        coshy = 0
        sinfy = 0
        sinhy = 0
    else:
        cosfy = np.cos(doy / 365.25 * 2 * np.pi)  # coefficient for A1
        coshy = np.cos(doy / 365.25 * 4 * np.pi)  # coefficient for B1
        sinfy = np.sin(doy / 365.25 * 2 * np.pi)  # coefficient for A2
        sinhy = np.sin(doy / 365.25 * 4 * np.pi)  # coefficient for B2
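    # Each gridded quantity X below is evaluated as
    #   X(doy) = mean + c1*cos(2*pi*doy/365.25) + s1*sin(2*pi*doy/365.25)
    #                 + c2*cos(4*pi*doy/365.25) + s2*sin(4*pi*doy/365.25)
    # i.e. a mean value plus annual (cosfy/sinfy) and semiannual (coshy/sinhy)
    # harmonics, using the five columns of each *_grid block.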

    nstat = lat.size

    # initialization
    p = np.zeros([nstat, 1])
    T = np.zeros([nstat, 1])
    dT = np.zeros([nstat, 1])
    Tm = np.zeros([nstat, 1])
    e = np.zeros([nstat, 1])
    ah = np.zeros([nstat, 1])
    aw = np.zeros([nstat, 1])
    la = np.zeros([nstat, 1])
    undu = np.zeros([nstat, 1])
    Gn_h = np.zeros([nstat, 1])
    Ge_h = np.zeros([nstat, 1])
    Gn_w = np.zeros([nstat, 1])
    Ge_w = np.zeros([nstat, 1])

    if lon < 0:
        plon = (lon + 2 * np.pi) * 180 / np.pi
    else:
        plon = lon * 180 / np.pi

    ppod = (-lat + np.pi / 2) * 180 / np.pi

    ipod = np.floor(ppod + 1)
    ilon = np.floor(plon + 1)

    # changed for the 1 degree grid
    diffpod = (ppod - (ipod - 0.5))
    difflon = (plon - (ilon - 0.5))

    if ipod == 181:
        ipod = 180

    if ilon == 361:
        ilon = 1

    if ilon == 0:
        ilon = 360

    indx = np.zeros(4)
    indx[0] = (ipod - 1) * 360 + ilon

    # near the poles: nearest neighbour interpolation, otherwise: bilinear
    # with the 1 degree grid the limits are lower and upper
    bilinear = 0
    if ppod > 0.5 and ppod < 179.5:
        bilinear = 1

    if bilinear == 0:
        ix = int(indx[0]) - 1

        # transforming ellipsoidal height to orthometric height
        undu = u_grid[ix]
        hgt = h_ell - undu

        # pressure, temperature at the height of the grid
        T0 = T_grid[ix, 0] + T_grid[ix, 1] * cosfy + T_grid[
            ix, 2] * sinfy + T_grid[ix, 3] * coshy + T_grid[ix, 4] * sinhy
        p0 = p_grid[ix, 0] + p_grid[ix, 1] * cosfy + p_grid[
            ix, 2] * sinfy + p_grid[ix, 3] * coshy + p_grid[ix, 4] * sinhy

        # specific humidity
        Q = Q_grid[ix, 0] + Q_grid[ix, 1] * cosfy + Q_grid[
            ix, 2] * sinfy + Q_grid[ix, 3] * coshy + Q_grid[ix, 4] * sinhy

        # lapse rate of the temperature
        dT = dT_grid[ix, 0] + dT_grid[ix, 1] * cosfy + dT_grid[
            ix, 2] * sinfy + dT_grid[ix, 3] * coshy + dT_grid[ix, 4] * sinhy

        # station height - grid height
        redh = hgt - Hs_grid[ix]

        # temperature at station height in Celsius
        T = T0 + dT * redh - 273.15

        # temperature lapse rate in degrees / km
        dT = dT * 1000

        # virtual temperature in Kelvin
        # virtual temperature in Kelvin
        Tv = T0 * (1 + 0.6077 * Q)

        c = gm * dMtr / (Rg * Tv)

        # pressure in hPa
        p = (p0 * np.exp(-c * redh)) / 100

        # hydrostatic and wet coefficients ah and aw
        ah = ah_grid[ix, 0] + ah_grid[ix, 1] * cosfy + ah_grid[
            ix, 2] * sinfy + ah_grid[ix, 3] * coshy + ah_grid[ix, 4] * sinhy
        aw = aw_grid[ix, 0] + aw_grid[ix, 1] * cosfy + aw_grid[
            ix, 2] * sinfy + aw_grid[ix, 3] * coshy + aw_grid[ix, 4] * sinhy

        # water vapour decrease factor la
        la = la_grid[ix,0] + \
                la_grid[ix,1]*cosfy + la_grid[ix,2]*sinfy + \
                la_grid[ix,3]*coshy + la_grid[ix,4]*sinhy

        # mean temperature Tm
        Tm = Tm_grid[ix,0] + \
                Tm_grid[ix,1]*cosfy + Tm_grid[ix,2]*sinfy + \
                Tm_grid[ix,3]*coshy + Tm_grid[ix,4]*sinhy

        # north and east gradients [total, hydrostatic and wet]
        Gn_h = Gn_h_grid[ix, 0] + Gn_h_grid[ix, 1] * cosfy + Gn_h_grid[
            ix, 2] * sinfy + Gn_h_grid[ix, 3] * coshy + Gn_h_grid[ix,
                                                                  4] * sinhy
        Ge_h = Ge_h_grid[ix, 0] + Ge_h_grid[ix, 1] * cosfy + Ge_h_grid[
            ix, 2] * sinfy + Ge_h_grid[ix, 3] * coshy + Ge_h_grid[ix,
                                                                  4] * sinhy
        Gn_w = Gn_w_grid[ix, 0] + Gn_w_grid[ix, 1] * cosfy + Gn_w_grid[
            ix, 2] * sinfy + Gn_w_grid[ix, 3] * coshy + Gn_w_grid[ix,
                                                                  4] * sinhy
        Ge_w = Ge_w_grid[ix, 0] + Ge_w_grid[ix, 1] * cosfy + Ge_w_grid[
            ix, 2] * sinfy + Ge_w_grid[ix, 3] * coshy + Ge_w_grid[ix,
                                                                  4] * sinhy

        # water vapor pressure in hPa
        e0 = Q * p0 / (0.622 + 0.378 * Q) / 100  # on the grid
        e = e0 * (100 * p / p0)**(
            la + 1)  # at the station height - [14] Askne and Nordius, 1987

    else:
        ipod1 = ipod + 1 * np.sign(diffpod)
        ilon1 = ilon + 1 * np.sign(difflon)

        # changed for the 1 degree grid
        if ilon1 == 361:
            ilon1 = 1

        if ilon1 == 0:
            ilon1 = 360

        # get the number of the line
        # changed for the 1 degree grid
        indx[1] = (ipod1 - 1) * 360 + ilon  # along same longitude
        indx[2] = (ipod - 1) * 360 + ilon1  # along same polar distance
        indx[3] = (ipod1 - 1) * 360 + ilon1  # diagonal
        indx = indx.astype(int)
        indx = indx - 1

        # transforming ellipsoidal height to orthometric height: Hortho = -N + Hell
        undul = u_grid[indx]
        hgt = h_ell - undul

        # pressure, temperature at the height of the grid
        T0 = T_grid[indx, 0] + T_grid[indx, 1] * cosfy + T_grid[
            indx, 2] * sinfy + T_grid[indx, 3] * coshy + T_grid[indx,
                                                                4] * sinhy
        p0 = p_grid[indx, 0] + p_grid[indx, 1] * cosfy + p_grid[
            indx, 2] * sinfy + p_grid[indx, 3] * coshy + p_grid[indx,
                                                                4] * sinhy

        # humidity
        Ql = Q_grid[indx, 0] + Q_grid[indx, 1] * cosfy + Q_grid[
            indx, 2] * sinfy + Q_grid[indx, 3] * coshy + Q_grid[indx,
                                                                4] * sinhy

        # reduction = stationheight - gridheight
        Hs1 = Hs_grid[indx]
        redh = hgt - Hs1

        # lapse rate of the temperature in degree / m
        dTl = dT_grid[indx, 0] + dT_grid[indx, 1] * cosfy + dT_grid[
            indx, 2] * sinfy + dT_grid[indx, 3] * coshy + dT_grid[indx,
                                                                  4] * sinhy

        # temperature reduction to station height
        Tl = T0 + dTl * redh - 273.15

        # virtual temperature
        Tv = T0 * (1 + 0.6077 * Ql)
        c = gm * dMtr / (Rg * Tv)

        # pressure in hPa
        pl = (p0 * np.exp(-c * redh)) / 100

        # hydrostatic and wet coefficients ah and aw
        ahl = ah_grid[indx, 0] + ah_grid[indx, 1] * cosfy + ah_grid[
            indx, 2] * sinfy + ah_grid[indx, 3] * coshy + ah_grid[indx,
                                                                  4] * sinhy
        awl = aw_grid[indx, 0] + aw_grid[indx, 1] * cosfy + aw_grid[
            indx, 2] * sinfy + aw_grid[indx, 3] * coshy + aw_grid[indx,
                                                                  4] * sinhy

        # water vapour decrease factor la
        lal = la_grid[indx, 0] + la_grid[indx, 1] * cosfy + la_grid[
            indx, 2] * sinfy + la_grid[indx, 3] * coshy + la_grid[indx,
                                                                  4] * sinhy

        # mean temperature of the water vapor Tm
        Tml = Tm_grid[indx, 0] + Tm_grid[indx, 1] * cosfy + Tm_grid[
            indx, 2] * sinfy + Tm_grid[indx, 3] * coshy + Tm_grid[indx,
                                                                  4] * sinhy

        # north and east gradients [total, hydrostatic and wet]
        Gn_hl = Gn_h_grid[indx, 0] + Gn_h_grid[indx, 1] * cosfy + Gn_h_grid[
            indx, 2] * sinfy + Gn_h_grid[indx, 3] * coshy + Gn_h_grid[
                indx, 4] * sinhy
        Ge_hl = Ge_h_grid[indx, 0] + Ge_h_grid[indx, 1] * cosfy + Ge_h_grid[
            indx, 2] * sinfy + Ge_h_grid[indx, 3] * coshy + Ge_h_grid[
                indx, 4] * sinhy
        Gn_wl = Gn_w_grid[indx, 0] + Gn_w_grid[indx, 1] * cosfy + Gn_w_grid[
            indx, 2] * sinfy + Gn_w_grid[indx, 3] * coshy + Gn_w_grid[
                indx, 4] * sinhy
        Ge_wl = Ge_w_grid[indx, 0] + Ge_w_grid[indx, 1] * cosfy + Ge_w_grid[
            indx, 2] * sinfy + Ge_w_grid[indx, 3] * coshy + Ge_w_grid[
                indx, 4] * sinhy

        # water vapor pressure in hPa
        e0 = Ql * p0 / (0.622 + 0.378 * Ql) / 100  # on the grid
        el = e0 * (100 * pl / p0)**(
            lal + 1)  # on the station height - [14] Askne and Nordius, 1987

        dnpod1 = abs(diffpod)  # distance nearer point
        dnpod2 = 1 - dnpod1  # distance to distant point
        dnlon1 = abs(difflon)
        dnlon2 = 1 - dnlon1
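        # bilinear interpolation of each quantity X from the 4 grid nodes :
        #   X = dnlon2*(dnpod2*X[0] + dnpod1*X[1]) + dnlon1*(dnpod2*X[2] + dnpod1*X[3])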

        # pressure
        R1 = dnpod2 * pl[0] + dnpod1 * pl[1]
        R2 = dnpod2 * pl[2] + dnpod1 * pl[3]
        p = dnlon2 * R1 + dnlon1 * R2

        # temperature
        R1 = dnpod2 * Tl[0] + dnpod1 * Tl[1]
        R2 = dnpod2 * Tl[2] + dnpod1 * Tl[3]
        T = dnlon2 * R1 + dnlon1 * R2

        # temperature in degree per km
        R1 = dnpod2 * dTl[0] + dnpod1 * dTl[1]
        R2 = dnpod2 * dTl[2] + dnpod1 * dTl[3]
        dT = (dnlon2 * R1 + dnlon1 * R2) * 1000

        # water vapor pressure in hPa
        R1 = dnpod2 * el[0] + dnpod1 * el[1]
        R2 = dnpod2 * el[2] + dnpod1 * el[3]
        e = dnlon2 * R1 + dnlon1 * R2

        # ah and aw
        R1 = dnpod2 * ahl[0] + dnpod1 * ahl[1]
        R2 = dnpod2 * ahl[2] + dnpod1 * ahl[3]
        ah = dnlon2 * R1 + dnlon1 * R2
        R1 = dnpod2 * awl[0] + dnpod1 * awl[1]
        R2 = dnpod2 * awl[2] + dnpod1 * awl[3]
        aw = dnlon2 * R1 + dnlon1 * R2

        # undulation
        R1 = dnpod2 * undul[0] + dnpod1 * undul[1]
        R2 = dnpod2 * undul[2] + dnpod1 * undul[3]
        undu = dnlon2 * R1 + dnlon1 * R2

        # water vapor decrease factor la
        R1 = dnpod2 * lal[0] + dnpod1 * lal[1]
        R2 = dnpod2 * lal[2] + dnpod1 * lal[3]
        la = dnlon2 * R1 + dnlon1 * R2

        # gradients
        R1 = dnpod2 * Gn_hl[0] + dnpod1 * Gn_hl[1]
        R2 = dnpod2 * Gn_hl[2] + dnpod1 * Gn_hl[3]
        Gn_h = (dnlon2 * R1 + dnlon1 * R2)
        R1 = dnpod2 * Ge_hl[0] + dnpod1 * Ge_hl[1]
        R2 = dnpod2 * Ge_hl[2] + dnpod1 * Ge_hl[3]
        Ge_h = (dnlon2 * R1 + dnlon1 * R2)
        R1 = dnpod2 * Gn_wl[0] + dnpod1 * Gn_wl[1]
        R2 = dnpod2 * Gn_wl[2] + dnpod1 * Gn_wl[3]
        Gn_w = (dnlon2 * R1 + dnlon1 * R2)
        R1 = dnpod2 * Ge_wl[0] + dnpod1 * Ge_wl[1]
        R2 = dnpod2 * Ge_wl[2] + dnpod1 * Ge_wl[3]
        Ge_w = (dnlon2 * R1 + dnlon1 * R2)

        # mean temperature of the water vapor Tm
        R1 = dnpod2 * Tml[0] + dnpod1 * Tml[1]
        R2 = dnpod2 * Tml[2] + dnpod1 * Tml[3]
        Tm = dnlon2 * R1 + dnlon1 * R2

    soln = [np.round(p,3),np.round(T,3),np.round(dT,3),np.round(Tm,3),np.round(e,3), \
            np.round(ah,3),np.round(aw,3),np.round(la,3),np.round(undu,3),np.round(Gn_h,3),np.round(Ge_h,3), \
            np.round(Gn_w,3),np.round(Ge_w,3)]
    return soln
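# --- Usage sketch for gpt3 (illustrative only : it assumes the GPT3 grid file
# can be loaded into a plain numeric array, e.g. with
# np.loadtxt('gpt3_1.grd', comments='%'), and that np and conv are imported) ---
#
# import datetime as dt
# C = np.loadtxt('gpt3_1.grd', comments='%')
# lat, lon, h_ell = np.deg2rad(48.2), np.deg2rad(16.37), 200.0
# p, T, dT, Tm, e, ah, aw, la, undu, Gn_h, Ge_h, Gn_w, Ge_w = \
#     gpt3(dt.datetime(2020, 1, 1), lat, lon, h_ell, C)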