Example #1
def test_read_EOP(EPOCH):
    #-- convert dates to Modified Julian days (days since 1858-11-17T00:00:00)
    delta_time = 86400.0 * np.arange(0, 365)
    MJD = pyTMD.time.convert_delta_time(delta_time,
                                        epoch1=(2000, 1, 1, 0, 0, 0),
                                        epoch2=(1858, 11, 17, 0, 0, 0),
                                        scale=1.0 / 86400.0)
    #-- add offset to convert to Julian days and then convert to calendar dates
    Y, M, D, h, m, s = pyTMD.time.convert_julian(2400000.5 + MJD,
                                                 FORMAT='tuple')
    #-- calculate time in year-decimal format
    time_decimal = pyTMD.time.convert_calendar_decimal(Y,
                                                       M,
                                                       day=D,
                                                       hour=h,
                                                       minute=m,
                                                       second=s)
    #-- mean and daily EOP files
    mean_pole_file = pyTMD.utilities.get_data_path(['data', 'mean-pole.tab'])
    pole_tide_file = pyTMD.utilities.get_data_path(['data', 'finals.all'])
    #-- calculate angular coordinates of mean pole at time
    #-- iterate over different IERS conventional mean pole (CMP) formulations
    mpx, mpy, fl = iers_mean_pole(mean_pole_file, time_decimal, EPOCH)
    #-- check flags
    assert np.all(fl)
    #-- read IERS daily polar motion values
    EOP = read_iers_EOP(pole_tide_file)
    #-- check validity
    assert np.all(np.isfinite(EOP['x'])) & np.all(np.isfinite(EOP['y']))
    #-- interpolate daily polar motion values to time using cubic splines
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['x'], k=3, s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['y'], k=3, s=0)
    px = xSPL(MJD)
    py = ySPL(MJD)
    #-- calculate differentials from mean pole positions
    mx = px - mpx
    my = -(py - mpy)
    #-- check validity of differentials
    assert np.all(np.isfinite(mx)) & np.all(np.isfinite(my))
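A minimal sketch (not part of the listing above) of the same daily polar-motion interpolation step on its own: build the cubic splines once from the table returned by read_iers_EOP and evaluate them at arbitrary Modified Julian Days. The helper name interpolate_polar_motion is hypothetical.

import scipy.interpolate

def interpolate_polar_motion(EOP, MJD):
    #-- EOP is the dictionary returned by read_iers_EOP ('MJD', 'x', 'y')
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['x'], k=3, s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['y'], k=3, s=0)
    #-- polar motion values (arcseconds) interpolated to the requested times
    return xSPL(MJD), ySPL(MJD)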
Example #2
def compute_OPT_icebridge_data(tide_dir,
                               arg,
                               METHOD=None,
                               VERBOSE=False,
                               MODE=0o775):

    #-- extract file name and subsetter indices lists
    match_object = re.match(r'(.*?)(\[(.*?)\])?$', arg)
    input_file = os.path.expanduser(match_object.group(1))
    #-- subset input file to indices
    if match_object.group(2):
        #-- decompress ranges and add to list
        input_subsetter = []
        for i in re.findall(r'((\d+)-(\d+)|(\d+))', match_object.group(3)):
            if i[3]:
                input_subsetter.append(int(i[3]))
            else:
                input_subsetter.extend(range(int(i[1]), int(i[2]) + 1))
    else:
        input_subsetter = None

    #-- output directory for input_file
    DIRECTORY = os.path.dirname(input_file)
    #-- calculate if input files are from ATM or LVIS (+GH)
    regex = {}
    regex['ATM'] = r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$'
    regex['ATM1b'] = r'(BLATM1b|ILATM1b)_(\d+)_(\d+)(.*?).(qi|TXT|h5)$'
    regex['LVIS'] = r'(BLVIS2|BVLIS2|ILVIS2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$'
    regex['LVGH'] = r'(ILVGH2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$'
    for key, val in regex.items():
        if re.match(val, os.path.basename(input_file)):
            OIB = key

    #-- invalid value
    fill_value = -9999.0
    #-- output netCDF4 and HDF5 file attributes
    #-- will be added to YAML header in csv files
    attrib = {}
    #-- latitude
    attrib['lat'] = {}
    attrib['lat']['long_name'] = 'Latitude_of_measurement'
    attrib['lat']['description'] = ('Corresponding_to_the_measurement_'
                                    'position_at_the_acquisition_time')
    attrib['lat']['units'] = 'Degrees_North'
    #-- longitude
    attrib['lon'] = {}
    attrib['lon']['long_name'] = 'Longitude_of_measurement'
    attrib['lon']['description'] = ('Corresponding_to_the_measurement_'
                                    'position_at_the_acquisition_time')
    attrib['lon']['units'] = 'Degrees_East'
    #-- ocean pole tides
    attrib['tide_oc_pole'] = {}
    attrib['tide_oc_pole']['long_name'] = 'Ocean_Pole_Tide'
    attrib['tide_oc_pole']['description'] = (
        'Ocean_pole_tide_radial_'
        'displacements_at_the_measurement_position_at_the_acquisition_time_due_'
        'to_polar_motion')
    attrib['tide_oc_pole']['reference'] = (
        'ftp://tai.bipm.org/iers/conv2010/'
        'chapter7/opoleloadcoefcmcor.txt.gz')
    attrib['tide_oc_pole']['units'] = 'meters'
    #-- Modified Julian Days
    attrib['time'] = {}
    attrib['time']['long_name'] = 'Time'
    attrib['time']['units'] = 'days since 1858-11-17T00:00:00'
    attrib['time']['description'] = 'Modified Julian Days'
    attrib['time']['calendar'] = 'standard'

    #-- extract information from first input file
    #-- acquisition year, month and day
    #-- number of points
    #-- instrument (PRE-OIB ATM or LVIS, OIB ATM or LVIS)
    if OIB in ('ATM', 'ATM1b'):
        M1, YYMMDD1, HHMMSS1, AX1, SF1 = re.findall(regex[OIB],
                                                    input_file).pop()
        #-- early date strings omitted century and millennia (e.g. 93 for 1993)
        if (len(YYMMDD1) == 6):
            ypre, MM1, DD1 = YYMMDD1[:2], YYMMDD1[2:4], YYMMDD1[4:]
            if (float(ypre) >= 90):
                YY1 = '{0:4.0f}'.format(float(ypre) + 1900.0)
            else:
                YY1 = '{0:4.0f}'.format(float(ypre) + 2000.0)
        elif (len(YYMMDD1) == 8):
            YY1, MM1, DD1 = YYMMDD1[:4], YYMMDD1[4:6], YYMMDD1[6:]
    elif OIB in ('LVIS', 'LVGH'):
        M1, RG1, YY1, MMDD1, RLD1, SS1 = re.findall(regex[OIB],
                                                    input_file).pop()
        MM1, DD1 = MMDD1[:2], MMDD1[2:]

    #-- read data from input_file
    print('{0} -->'.format(input_file)) if VERBOSE else None
    if (OIB == 'ATM'):
        #-- load IceBridge ATM data from input_file
        dinput, file_lines, HEM = read_ATM_icessn_file(input_file,
                                                       input_subsetter)
    elif (OIB == 'ATM1b'):
        #-- load IceBridge Level-1b ATM data from input_file
        dinput, file_lines, HEM = read_ATM_qfit_file(input_file,
                                                     input_subsetter)
    elif OIB in ('LVIS', 'LVGH'):
        #-- load IceBridge LVIS data from input_file
        dinput, file_lines, HEM = read_LVIS_HDF5_file(input_file,
                                                      input_subsetter)

    #-- extract lat/lon
    lon = dinput['lon'][:]
    lat = dinput['lat'][:]
    #-- convert time from UTC time of day to modified julian days (MJD)
    #-- J2000: seconds since 2000-01-01 12:00:00 UTC
    t = dinput['time'][:] / 86400.0 + 51544.5
    #-- convert from MJD to calendar dates
    YY, MM, DD, HH, MN, SS = convert_julian(t + 2400000.5, FORMAT='tuple')
    #-- convert calendar dates into year decimal
    tdec = convert_calendar_decimal(YY,
                                    MM,
                                    DAY=DD,
                                    HOUR=HH,
                                    MINUTE=MN,
                                    SECOND=SS)
    #-- elevation
    h1 = dinput['data'][:]

    #-- degrees to radians and arcseconds to radians
    dtr = np.pi / 180.0
    atr = np.pi / 648000.0
    #-- earth and physical parameters (IERS)
    G = 6.67428e-11  #-- universal constant of gravitation [m^3/(kg*s^2)]
    GM = 3.986004418e14  #-- geocentric gravitational constant [m^3/s^2]
    ge = 9.7803278  #-- mean equatorial gravity [m/s^2]
    a_axis = 6378136.6  #-- equatorial radius of the Earth [m]
    flat = 1.0 / 298.257223563  #-- flattening of the ellipsoid
    omega = 7.292115e-5  #-- mean rotation rate of the Earth [radians/s]
    rho_w = 1025.0  #-- density of sea water [kg/m^3]
    #-- Linear eccentricity and first numerical eccentricity
    lin_ecc = np.sqrt((2.0 * flat - flat**2) * a_axis**2)
    ecc1 = lin_ecc / a_axis
    #-- tidal love number differential (1 + kl - hl) for pole tide frequencies
    gamma = 0.6870 + 0.0036j

    #-- convert from geodetic latitude to geocentric latitude
    #-- geodetic latitude in radians
    latitude_geodetic_rad = lat * dtr
    #-- prime vertical radius of curvature
    N = a_axis / np.sqrt(1.0 - ecc1**2.0 * np.sin(latitude_geodetic_rad)**2.0)
    #-- calculate X, Y and Z from geodetic latitude and longitude
    X = (N + h1) * np.cos(latitude_geodetic_rad) * np.cos(lon * dtr)
    Y = (N + h1) * np.cos(latitude_geodetic_rad) * np.sin(lon * dtr)
    Z = (N * (1.0 - ecc1**2.0) + h1) * np.sin(latitude_geodetic_rad)
    rr = np.sqrt(X**2.0 + Y**2.0 + Z**2.0)
    #-- calculate geocentric latitude and convert to degrees
    latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0)) / dtr

    #-- pole tide displacement scale factor
    Hp = np.sqrt(8.0 * np.pi / 15.0) * (omega**2 * a_axis**4) / GM
    K = 4.0 * np.pi * G * rho_w * Hp * a_axis / (3.0 * ge)
    K1 = 4.0 * np.pi * G * rho_w * Hp * a_axis**3 / (3.0 * GM)

    #-- read ocean pole tide map from Desai (2002)
    ocean_pole_tide_file = get_data_path(['data', 'opoleloadcoefcmcor.txt.gz'])
    iur, iun, iue, ilon, ilat = read_ocean_pole_tide(ocean_pole_tide_file)

    #-- pole tide files (mean and daily)
    # mean_pole_file = os.path.join(tide_dir,'mean-pole.tab')
    mean_pole_file = os.path.join(tide_dir, 'mean_pole_2017-10-23.tab')
    pole_tide_file = os.path.join(tide_dir, 'finals_all_2017-09-01.tab')

    #-- read IERS daily polar motion values
    EOP = read_iers_EOP(pole_tide_file)
    #-- create cubic spline interpolations of daily polar motion values
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['x'], k=3, s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['y'], k=3, s=0)

    #-- output ocean pole tide HDF5 file
    #-- form: rg_NASA_OCEAN_POLE_TIDE_WGS84_fl1yyyymmddjjjjj.H5
    #-- where rg is the hemisphere flag (GR or AN) for the region
    #-- fl1 is the data flag (ATM, LVIS, GLAS)
    #-- yyyymmddjjjjj is the year, month, day and starting second of the file
    #-- output region flags: GR for Greenland and AN for Antarctica
    hem_flag = {'N': 'GR', 'S': 'AN'}
    #-- use starting second to distinguish between files for the day
    JJ1 = np.min(dinput['time']) % 86400
    #-- output file format
    args = (hem_flag[HEM], 'OCEAN_POLE_TIDE', OIB, YY1, MM1, DD1, JJ1)
    FILENAME = '{0}_NASA_{1}_WGS84_{2}{3}{4}{5}{6:05.0f}.H5'.format(*args)
    #-- print file information
    print('\t{0}'.format(FILENAME)) if VERBOSE else None

    #-- open output HDF5 file
    fid = h5py.File(os.path.join(DIRECTORY, FILENAME), 'w')

    #-- interpolate ocean pole tide map from Desai (2002)
    if (METHOD == 'spline'):
        #-- use scipy bivariate splines to interpolate to output points
        f1 = scipy.interpolate.RectBivariateSpline(ilon,
                                                   ilat[::-1],
                                                   iur[:, ::-1].real,
                                                   kx=1,
                                                   ky=1)
        f2 = scipy.interpolate.RectBivariateSpline(ilon,
                                                   ilat[::-1],
                                                   iur[:, ::-1].imag,
                                                   kx=1,
                                                   ky=1)
        UR = np.zeros((file_lines), dtype=np.complex128)
        UR.real = f1.ev(lon, latitude_geocentric)
        UR.imag = f2.ev(lon, latitude_geocentric)
    else:
        #-- use scipy regular grid to interpolate values for a given method
        r1 = scipy.interpolate.RegularGridInterpolator((ilon, ilat[::-1]),
                                                       iur[:, ::-1],
                                                       method=METHOD)
        UR = r1(np.c_[lon, latitude_geocentric])

    #-- calculate angular coordinates of mean pole at time tdec
    mpx, mpy, fl = iers_mean_pole(mean_pole_file, tdec, '2015')
    #-- interpolate daily polar motion values to t using cubic splines
    px = xSPL(t)
    py = ySPL(t)
    #-- calculate differentials from mean pole positions
    mx = px - mpx
    my = -(py - mpy)
    #-- calculate radial displacement at time
    Urad = np.ma.zeros((file_lines), fill_value=fill_value)
    Urad.data[:] = K * atr * np.real(
        (mx * gamma.real + my * gamma.imag) * UR.real +
        (my * gamma.real - mx * gamma.imag) * UR.imag)
    #-- replace fill values
    Urad.mask = np.isnan(Urad.data)
    Urad.data[Urad.mask] = Urad.fill_value

    #-- add latitude and longitude to output file
    for key in ['lat', 'lon']:
        #-- Defining the HDF5 dataset variables for lat/lon
        h5 = fid.create_dataset(key, (file_lines, ),
                                data=dinput[key][:],
                                dtype=dinput[key].dtype,
                                compression='gzip')
        #-- add HDF5 variable attributes
        for att_name, att_val in attrib[key].items():
            h5.attrs[att_name] = att_val
        #-- attach dimensions
        h5.dims[0].label = 'RECORD_SIZE'

    #-- output tides to HDF5 dataset
    h5 = fid.create_dataset('tide_oc_pole', (file_lines, ),
                            data=Urad,
                            dtype=Urad.dtype,
                            fillvalue=fill_value,
                            compression='gzip')
    #-- add HDF5 variable attributes
    h5.attrs['_FillValue'] = fill_value
    for att_name, att_val in attrib['tide_oc_pole'].items():
        h5.attrs[att_name] = att_val
    #-- attach dimensions
    h5.dims[0].label = 'RECORD_SIZE'

    #-- output days to HDF5 dataset
    h5 = fid.create_dataset('time', (file_lines, ),
                            data=t,
                            dtype=t.dtype,
                            compression='gzip')
    #-- add HDF5 variable attributes
    for att_name, att_val in attrib['time'].items():
        h5.attrs[att_name] = att_val
    #-- attach dimensions
    h5.dims[0].label = 'RECORD_SIZE'

    #-- HDF5 file attributes
    fid.attrs['featureType'] = 'trajectory'
    fid.attrs['title'] = 'Tidal_correction_for_elevation_measurements'
    fid.attrs['summary'] = ('Ocean_pole_tide_radial_displacements_'
                            'computed_at_elevation_measurements.')
    fid.attrs['project'] = 'NASA_Operation_IceBridge'
    fid.attrs['processing_level'] = '4'
    fid.attrs['date_created'] = time.strftime('%Y-%m-%d', time.localtime())
    #-- add attributes for input files
    fid.attrs['elevation_file'] = os.path.basename(input_file)
    #-- add geospatial and temporal attributes
    fid.attrs['geospatial_lat_min'] = dinput['lat'].min()
    fid.attrs['geospatial_lat_max'] = dinput['lat'].max()
    fid.attrs['geospatial_lon_min'] = dinput['lon'].min()
    fid.attrs['geospatial_lon_max'] = dinput['lon'].max()
    fid.attrs['geospatial_lat_units'] = "degrees_north"
    fid.attrs['geospatial_lon_units'] = "degrees_east"
    fid.attrs['geospatial_ellipsoid'] = "WGS84"
    fid.attrs['time_type'] = 'UTC'

    #-- convert start/end time from MJD into Julian days
    JD_start = np.min(t) + 2400000.5
    JD_end = np.max(t) + 2400000.5
    #-- convert to calendar date with convert_julian.py
    cal = convert_julian(np.array([JD_start, JD_end]), ASTYPE=int)
    #-- add attributes with measurement date start, end and duration
    args = (cal['hour'][0], cal['minute'][0], cal['second'][0])
    fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args)
    args = (cal['hour'][-1], cal['minute'][-1], cal['second'][-1])
    fid.attrs['RangeEndingTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args)
    args = (cal['year'][0], cal['month'][0], cal['day'][0])
    fid.attrs['RangeBeginningDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args)
    args = (cal['year'][-1], cal['month'][-1], cal['day'][-1])
    fid.attrs['RangeEndingDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args)
    duration = np.round(JD_end * 86400.0 - JD_start * 86400.0)
    fid.attrs['DurationTimeSeconds'] = '{0:0.0f}'.format(duration)
    #-- close the output HDF5 dataset
    fid.close()
    #-- change the permissions level to MODE
    os.chmod(os.path.join(DIRECTORY, FILENAME), MODE)
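A minimal usage sketch (hypothetical directory and file name, not part of the listing): the second argument may carry a trailing index range in brackets, which the regular expression at the top of the function turns into a subsetter list before the file is read.

import os
#-- directory holding mean_pole_2017-10-23.tab and finals_all_2017-09-01.tab
tide_dir = os.path.expanduser('~/tide_models')
#-- hypothetical Operation IceBridge ATM file, restricted to records 0-999
arg = 'ILATM2_20181010_123456_smooth_nadir3seg_50pt.csv[0-999]'
compute_OPT_icebridge_data(tide_dir, arg, METHOD='spline',
                           VERBOSE=True, MODE=0o775)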
def compute_OPT_displacements(tide_dir, input_file, output_file,
    FORMAT='csv', VARIABLES=['time','lat','lon','data'], HEADER=0, TYPE='drift',
    TIME_UNITS='days since 1858-11-17T00:00:00', TIME=None, PROJECTION='4326',
    METHOD='spline', VERBOSE=False, MODE=0o775):

    #-- invalid value
    fill_value = -9999.0
    #-- output netCDF4 and HDF5 file attributes
    #-- will be added to YAML header in csv files
    attrib = {}
    #-- latitude
    attrib['lat'] = {}
    attrib['lat']['long_name'] = 'Latitude'
    attrib['lat']['units'] = 'Degrees_North'
    #-- longitude
    attrib['lon'] = {}
    attrib['lon']['long_name'] = 'Longitude'
    attrib['lon']['units'] = 'Degrees_East'
    #-- ocean pole tides
    attrib['tide_oc_pole'] = {}
    attrib['tide_oc_pole']['long_name'] = 'Ocean_Pole_Tide'
    attrib['tide_oc_pole']['description'] = ('Ocean_pole_tide_radial_'
        'displacements_time_due_to_polar_motion')
    attrib['tide_oc_pole']['reference'] = ('ftp://tai.bipm.org/iers/conv2010/'
        'chapter7/opoleloadcoefcmcor.txt.gz')
    attrib['tide_oc_pole']['units'] = 'meters'
    attrib['tide_oc_pole']['_FillValue'] = fill_value
    #-- Modified Julian Days
    attrib['time'] = {}
    attrib['time']['long_name'] = 'Time'
    attrib['time']['units'] = 'days since 1858-11-17T00:00:00'
    attrib['time']['description'] = 'Modified Julian Days'
    attrib['time']['calendar'] = 'standard'

    #-- read input file to extract time, spatial coordinates and data
    if (FORMAT == 'csv'):
        dinput = pyTMD.spatial.from_ascii(input_file, columns=VARIABLES,
            header=HEADER, verbose=VERBOSE)
    elif (FORMAT == 'netCDF4'):
        dinput = pyTMD.spatial.from_netCDF4(input_file, timename=VARIABLES[0],
            xname=VARIABLES[2], yname=VARIABLES[1], varname=VARIABLES[3],
            verbose=VERBOSE)
    elif (FORMAT == 'HDF5'):
        dinput = pyTMD.spatial.from_HDF5(input_file, timename=VARIABLES[0],
            xname=VARIABLES[2], yname=VARIABLES[1], varname=VARIABLES[3],
            verbose=VERBOSE)
    elif (FORMAT == 'geotiff'):
        dinput = pyTMD.spatial.from_geotiff(input_file, verbose=VERBOSE)
        #-- copy global geotiff attributes for projection and grid parameters
        for att_name in ['projection','wkt','spacing','extent']:
            attrib[att_name] = dinput['attributes'][att_name]
    #-- update time variable if entered as argument
    if TIME is not None:
        dinput['time'] = np.copy(TIME)

    #-- converting x,y from projection to latitude/longitude
    #-- could try to extract projection attributes from netCDF4 and HDF5 files
    try:
        crs1 = pyproj.CRS.from_string("epsg:{0:d}".format(int(PROJECTION)))
    except (ValueError,pyproj.exceptions.CRSError):
        crs1 = pyproj.CRS.from_string(PROJECTION)
    crs2 = pyproj.CRS.from_string("epsg:{0:d}".format(4326))
    transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)
    if (TYPE == 'grid'):
        ny,nx = (len(dinput['y']),len(dinput['x']))
        gridx,gridy = np.meshgrid(dinput['x'],dinput['y'])
        lon,lat = transformer.transform(gridx.flatten(),gridy.flatten())
    elif (TYPE == 'drift'):
        lon,lat = transformer.transform(dinput['x'].flatten(),
            dinput['y'].flatten())

    #-- extract time units from netCDF4 and HDF5 attributes or from TIME_UNITS
    try:
        time_string = dinput['attributes']['time']['units']
    except (TypeError, KeyError):
        epoch1,to_secs = pyTMD.time.parse_date_string(TIME_UNITS)
    else:
        epoch1,to_secs = pyTMD.time.parse_date_string(time_string)
    #-- convert dates to Modified Julian days (days since 1858-11-17T00:00:00)
    MJD = pyTMD.time.convert_delta_time(to_secs*dinput['time'].flatten(),
        epoch1=epoch1, epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0)
    #-- add offset to convert to Julian days and then convert to calendar dates
    Y,M,D,h,m,s = pyTMD.time.convert_julian(2400000.5 + MJD, FORMAT='tuple')
    #-- calculate time in year-decimal format
    time_decimal = pyTMD.time.convert_calendar_decimal(Y,M,day=D,
        hour=h,minute=m,second=s)
    #-- number of time points
    nt = len(time_decimal)

    #-- degrees to radians and arcseconds to radians
    dtr = np.pi/180.0
    atr = np.pi/648000.0
    #-- earth and physical parameters (IERS and WGS84)
    G = 6.67428e-11#-- universal constant of gravitation [m^3/(kg*s^2)]
    GM = 3.986004418e14#-- geocentric gravitational constant [m^3/s^2]
    a_axis = 6378136.6#-- WGS84 equatorial radius of the Earth [m]
    flat = 1.0/298.257223563#-- flattening of the WGS84 ellipsoid
    omega = 7.292115e-5#-- mean rotation rate of the Earth [radians/s]
    rho_w = 1025.0#-- density of sea water [kg/m^3]
    ge = 9.7803278#-- mean equatorial gravitational acceleration [m/s^2]
    #-- Linear eccentricity and first numerical eccentricity
    lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2)
    ecc1 = lin_ecc/a_axis
    #-- tidal love number differential (1 + kl - hl) for pole tide frequencies
    gamma = 0.6870 + 0.0036j

    #-- flatten heights
    h = dinput['data'].flatten() if ('data' in dinput.keys()) else 0.0
    #-- convert from geodetic latitude to geocentric latitude
    #-- calculate X, Y and Z from geodetic latitude and longitude
    X,Y,Z = pyTMD.spatial.to_cartesian(lon,lat,h=h,a_axis=a_axis,flat=flat)
    #-- calculate geocentric latitude and convert to degrees
    latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0))/dtr

    #-- pole tide displacement scale factor
    Hp = np.sqrt(8.0*np.pi/15.0)*(omega**2*a_axis**4)/GM
    K = 4.0*np.pi*G*rho_w*Hp*a_axis/(3.0*ge)
    K1 = 4.0*np.pi*G*rho_w*Hp*a_axis**3/(3.0*GM)

    #-- pole tide files (mean and daily)
    mean_pole_file = get_data_path(['data','mean-pole.tab'])
    pole_tide_file = get_data_path(['data','finals.all'])
    #-- calculate angular coordinates of mean pole at time
    mpx,mpy,fl = iers_mean_pole(mean_pole_file,time_decimal,'2015')
    #-- read IERS daily polar motion values
    EOP = read_iers_EOP(pole_tide_file)
    #-- interpolate daily polar motion values to t1 using cubic splines
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['x'],k=3,s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['y'],k=3,s=0)
    px = xSPL(MJD)
    py = ySPL(MJD)
    #-- calculate differentials from mean pole positions
    mx = px - mpx
    my = -(py - mpy)

    #-- read ocean pole tide map from Desai (2002)
    ocean_pole_tide_file = get_data_path(['data','opoleloadcoefcmcor.txt.gz'])
    iur,iun,iue,ilon,ilat = read_ocean_pole_tide(ocean_pole_tide_file)
    #-- interpolate ocean pole tide map from Desai (2002)
    if (METHOD == 'spline'):
        #-- use scipy bivariate splines to interpolate to output points
        f1 = scipy.interpolate.RectBivariateSpline(ilon, ilat[::-1],
            iur[:,::-1].real, kx=1, ky=1)
        f2 = scipy.interpolate.RectBivariateSpline(ilon, ilat[::-1],
            iur[:,::-1].imag, kx=1, ky=1)
        UR = np.zeros((len(latitude_geocentric)),dtype=np.complex128)
        UR.real = f1.ev(lon,latitude_geocentric)
        UR.imag = f2.ev(lon,latitude_geocentric)
    else:
        #-- use scipy regular grid to interpolate values for a given method
        r1 = scipy.interpolate.RegularGridInterpolator((ilon,ilat[::-1]),
            iur[:,::-1], method=METHOD)
        UR = r1(np.c_[lon,latitude_geocentric])

    #-- calculate radial displacement at time
    if (TYPE == 'grid'):
        Urad = np.ma.zeros((ny,nx,nt),fill_value=fill_value)
        Urad.mask = np.zeros((ny,nx,nt),dtype=bool)
        for i in range(nt):
            URAD = K*atr*np.real((mx[i]*gamma.real + my[i]*gamma.imag)*UR.real +
                (my[i]*gamma.real - mx[i]*gamma.imag)*UR.imag)
            #-- reform grid
            Urad.data[:,:,i] = np.reshape(URAD, (ny,nx))
            Urad.mask[:,:,i] = np.isnan(URAD)
    elif (TYPE == 'drift'):
        Urad = np.ma.zeros((nt),fill_value=fill_value)
        Urad.data[:] = K*atr*np.real((mx*gamma.real + my*gamma.imag)*UR.real +
            (my*gamma.real - mx*gamma.imag)*UR.imag)
        Urad.mask = np.isnan(Urad.data)
    #-- replace invalid data with fill values
    Urad.data[Urad.mask] = Urad.fill_value

    #-- output to file
    output = dict(time=MJD,lon=lon,lat=lat,tide_oc_pole=Urad)
    if (FORMAT == 'csv'):
        pyTMD.spatial.to_ascii(output, attrib, output_file, delimiter=',',
            columns=['time','lat','lon','tide_oc_pole'], verbose=VERBOSE)
    elif (FORMAT == 'netCDF4'):
        pyTMD.spatial.to_netCDF4(output, attrib, output_file, verbose=VERBOSE)
    elif (FORMAT == 'HDF5'):
        pyTMD.spatial.to_HDF5(output, attrib, output_file, verbose=VERBOSE)
    elif (FORMAT == 'geotiff'):
        pyTMD.spatial.to_geotiff(output, attrib, output_file, verbose=VERBOSE,
            varname='tide_oc_pole')
    #-- change the permissions level to MODE
    os.chmod(output_file, MODE)
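A minimal usage sketch (hypothetical file names): computing ocean pole tide displacements for a drift-track csv file whose columns follow the default VARIABLES order and whose coordinates are already geographic (EPSG:4326). The tide_dir argument is accepted but unused here, since this version reads the mean and daily pole files from the package data path.

compute_OPT_displacements('~/tide_models', 'drift_input.csv', 'drift_opt.csv',
    FORMAT='csv', VARIABLES=['time','lat','lon','data'], HEADER=0,
    TYPE='drift', TIME_UNITS='days since 1858-11-17T00:00:00',
    PROJECTION='4326', METHOD='spline', VERBOSE=True, MODE=0o775)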
def compute_LPT_displacements(input_file, output_file,
    FORMAT='csv', VARIABLES=['time','lat','lon','data'], HEADER=0, TYPE='drift',
    TIME_UNITS='days since 1858-11-17T00:00:00', TIME=None, PROJECTION='4326',
    VERBOSE=False, MODE=0o775):

    #-- invalid value
    fill_value = -9999.0
    #-- output netCDF4 and HDF5 file attributes
    #-- will be added to YAML header in csv files
    attrib = {}
    #-- latitude
    attrib['lat'] = {}
    attrib['lat']['long_name'] = 'Latitude'
    attrib['lat']['units'] = 'Degrees_North'
    #-- longitude
    attrib['lon'] = {}
    attrib['lon']['long_name'] = 'Longitude'
    attrib['lon']['units'] = 'Degrees_East'
    #-- load pole tides
    attrib['tide_pole'] = {}
    attrib['tide_pole']['long_name'] = 'Solid_Earth_Pole_Tide'
    attrib['tide_pole']['description'] = ('Solid_Earth_pole_tide_radial_'
        'displacements_due_to_polar_motion')
    attrib['tide_pole']['reference'] = ('ftp://tai.bipm.org/iers/conv2010/'
        'chapter7/tn36_c7.pdf')
    attrib['tide_pole']['units'] = 'meters'
    attrib['tide_pole']['_FillValue'] = fill_value
    #-- time
    attrib['time'] = {}
    attrib['time']['long_name'] = 'Time'
    attrib['time']['units'] = 'days since 1858-11-17T00:00:00'
    attrib['time']['description'] = 'Modified Julian Days'
    attrib['time']['calendar'] = 'standard'

    #-- read input file to extract time, spatial coordinates and data
    if (FORMAT == 'csv'):
        dinput = pyTMD.spatial.from_ascii(input_file, columns=VARIABLES,
            header=HEADER, verbose=VERBOSE)
    elif (FORMAT == 'netCDF4'):
        dinput = pyTMD.spatial.from_netCDF4(input_file, timename=VARIABLES[0],
            xname=VARIABLES[2], yname=VARIABLES[1], varname=VARIABLES[3],
            verbose=VERBOSE)
    elif (FORMAT == 'HDF5'):
        dinput = pyTMD.spatial.from_HDF5(input_file, timename=VARIABLES[0],
            xname=VARIABLES[2], yname=VARIABLES[1], varname=VARIABLES[3],
            verbose=VERBOSE)
    elif (FORMAT == 'geotiff'):
        dinput = pyTMD.spatial.from_geotiff(input_file, verbose=VERBOSE)
        #-- copy global geotiff attributes for projection and grid parameters
        for att_name in ['projection','wkt','spacing','extent']:
            attrib[att_name] = dinput['attributes'][att_name]
    #-- update time variable if entered as argument
    if TIME is not None:
        dinput['time'] = np.copy(TIME)

    #-- converting x,y from projection to latitude/longitude
    #-- could try to extract projection attributes from netCDF4 and HDF5 files
    try:
        crs1 = pyproj.CRS.from_string("epsg:{0:d}".format(int(PROJECTION)))
    except (ValueError,pyproj.exceptions.CRSError):
        crs1 = pyproj.CRS.from_string(PROJECTION)
    crs2 = pyproj.CRS.from_string("epsg:{0:d}".format(4326))
    transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)
    if (TYPE == 'grid'):
        ny,nx = (len(dinput['y']),len(dinput['x']))
        gridx,gridy = np.meshgrid(dinput['x'],dinput['y'])
        lon,lat = transformer.transform(gridx.flatten(),gridy.flatten())
    elif (TYPE == 'drift'):
        lon,lat = transformer.transform(dinput['x'].flatten(),
            dinput['y'].flatten())

    #-- extract time units from netCDF4 and HDF5 attributes or from TIME_UNITS
    try:
        time_string = dinput['attributes']['time']['units']
    except (TypeError, KeyError):
        epoch1,to_secs = pyTMD.time.parse_date_string(TIME_UNITS)
    else:
        epoch1,to_secs = pyTMD.time.parse_date_string(time_string)
    #-- convert dates to Modified Julian days (days since 1858-11-17T00:00:00)
    MJD = pyTMD.time.convert_delta_time(to_secs*dinput['time'].flatten(),
        epoch1=epoch1, epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0)
    #-- add offset to convert to Julian days and then convert to calendar dates
    Y,M,D,h,m,s = pyTMD.time.convert_julian(2400000.5 + MJD, FORMAT='tuple')
    #-- calculate time in year-decimal format
    time_decimal = pyTMD.time.convert_calendar_decimal(Y,M,day=D,
        hour=h,minute=m,second=s)
    #-- number of time points
    nt = len(time_decimal)

    #-- degrees to radians and arcseconds to radians
    dtr = np.pi/180.0
    atr = np.pi/648000.0
    #-- earth and physical parameters (IERS and WGS84)
    GM = 3.986004418e14#-- geocentric gravitational constant [m^3/s^2]
    a_axis = 6378136.6#-- semimajor axis of the WGS84 ellipsoid [m]
    flat = 1.0/298.257223563#-- flattening of the WGS84 ellipsoid
    b_axis = (1.0 - flat)*a_axis#-- semiminor axis of the WGS84 ellipsoid [m]
    omega = 7.292115e-5#-- mean rotation rate of the Earth [radians/s]
    #-- tidal love number appropriate for the load tide
    hb2 = 0.6207
    #-- Linear eccentricity, first and second numerical eccentricity
    lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2)
    ecc1 = lin_ecc/a_axis
    ecc2 = lin_ecc/b_axis
    #-- m parameter [omega^2*a^2*b/(GM)]. p. 70, Eqn.(2-137)
    m = omega**2*((1 -flat)*a_axis**3)/GM
    #-- flattening components
    f_2 = -flat + (5.0/2.0)*m + (1.0/2.0)*flat**2.0 - (26.0/7.0)*flat*m + \
        (15.0/4.0)*m**2.0
    f_4 = -(1.0/2.0)*flat**2.0 + (5.0/2.0)*flat*m

    #-- flatten heights
    h = dinput['data'].flatten() if ('data' in dinput.keys()) else 0.0
    #-- convert from geodetic latitude to geocentric latitude
    #-- calculate X, Y and Z from geodetic latitude and longitude
    X,Y,Z = pyTMD.spatial.to_cartesian(lon,lat,h=h,a_axis=a_axis,flat=flat)
    rr = np.sqrt(X**2.0 + Y**2.0 + Z**2.0)
    #-- calculate geocentric latitude and convert to degrees
    latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0))/dtr
    #-- geocentric colatitude and longitude in radians
    theta = dtr*(90.0 - latitude_geocentric)
    phi = lon*dtr

    #-- compute normal gravity at spatial location and elevation of points.
    #-- normal gravity at the equator. p. 79, Eqn.(2-186)
    gamma_a = (GM/(a_axis*b_axis)) * (1.0-(3.0/2.0)*m - (3.0/14.0)*ecc2**2.0*m)
    #-- Normal gravity. p. 80, Eqn.(2-199)
    gamma_0 = gamma_a*(1.0 + f_2*np.cos(theta)**2.0 +
        f_4*np.sin(np.pi*latitude_geocentric/180.0)**4.0)
    #-- Normal gravity at height h. p. 82, Eqn.(2-215)
    gamma_h = gamma_0*(1.0 - \
        (2.0/a_axis)*(1.0+flat+m-2.0*flat*np.cos(theta)**2.0)*h + \
        (3.0/a_axis**2.0)*h**2.0)

    #-- pole tide files (mean and daily)
    mean_pole_file = pyTMD.utilities.get_data_path(['data','mean-pole.tab'])
    pole_tide_file = pyTMD.utilities.get_data_path(['data','finals.all'])
    #-- calculate angular coordinates of mean pole at time
    mpx,mpy,fl = iers_mean_pole(mean_pole_file,time_decimal,'2015')
    #-- read IERS daily polar motion values
    EOP = read_iers_EOP(pole_tide_file)
    #-- interpolate daily polar motion values to MJD using cubic splines
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['x'],k=3,s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['y'],k=3,s=0)
    px = xSPL(MJD)
    py = ySPL(MJD)
    #-- calculate differentials from mean pole positions
    mx = px - mpx
    my = -(py - mpy)

    #-- scale factor for the radial displacement
    dfactor = -hb2*atr*(omega**2*rr**2)/(2.0*gamma_h)

    #-- calculate radial displacement at time
    if (TYPE == 'grid'):
        Srad = np.ma.zeros((ny,nx,nt),fill_value=fill_value)
        Srad.mask = np.zeros((ny,nx,nt),dtype=bool)
        for i in range(nt):
            SRAD=dfactor*np.sin(2.0*theta)*(mx[i]*np.cos(phi)+my[i]*np.sin(phi))
            #-- reform grid
            Srad.data[:,:,i]=np.reshape(SRAD, (ny,nx))
            Srad.mask[:,:,i]=np.isnan(SRAD)
    elif (TYPE == 'drift'):
        Srad = np.ma.zeros((nt),fill_value=fill_value)
        Srad.data[:] = dfactor*np.sin(2.0*theta)*(mx*np.cos(phi)+my*np.sin(phi))
        Srad.mask = np.isnan(Srad.data)
    #-- replace invalid data with fill values
    Srad.data[Srad.mask] = Srad.fill_value

    #-- output to file
    output = dict(time=MJD,lon=lon,lat=lat,tide_pole=Srad)
    if (FORMAT == 'csv'):
        pyTMD.spatial.to_ascii(output, attrib, output_file, delimiter=',',
            columns=['time','lat','lon','tide_pole'], verbose=VERBOSE)
    elif (FORMAT == 'netCDF4'):
        pyTMD.spatial.to_netCDF4(output, attrib, output_file, verbose=VERBOSE)
    elif (FORMAT == 'HDF5'):
        pyTMD.spatial.to_HDF5(output, attrib, output_file, verbose=VERBOSE)
    elif (FORMAT == 'geotiff'):
        pyTMD.spatial.to_geotiff(output, attrib, output_file, verbose=VERBOSE,
            varname='tide_pole')
    #-- change the permissions level to MODE
    os.chmod(output_file, MODE)
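A minimal usage sketch (hypothetical file names) for the solid Earth pole tide driver, here for a gridded netCDF4 file already in geographic coordinates with variables named after the default VARIABLES list.

compute_LPT_displacements('grid_input.nc', 'grid_lpt.nc', FORMAT='netCDF4',
    VARIABLES=['time','lat','lon','data'], TYPE='grid', PROJECTION='4326',
    VERBOSE=True, MODE=0o775)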
Example #5
def compute_OPT_ICESat(FILE, METHOD=None, VERBOSE=False, MODE=0o775):

    #-- get directory from FILE
    print('{0} -->'.format(os.path.basename(FILE))) if VERBOSE else None
    DIRECTORY = os.path.dirname(FILE)

    #-- compile regular expression operator for extracting information from file
    rx = re.compile((r'GLAH(\d{2})_(\d{3})_(\d{1})(\d{1})(\d{2})_(\d{3})_'
                     r'(\d{4})_(\d{1})_(\d{2})_(\d{4})\.H5'), re.VERBOSE)
    #-- extract parameters from ICESat/GLAS HDF5 file name
    #-- PRD:  Product number (01, 05, 06, 12, 13, 14, or 15)
    #-- RL:  Release number for process that created the product = 634
    #-- RGTP:  Repeat ground-track phase (1=8-day, 2=91-day, 3=transfer orbit)
    #-- ORB:   Reference orbit number (starts at 1 and increments each time a
    #--           new reference orbit ground track file is obtained.)
    #-- INST:  Instance number (increments every time the satellite enters a
    #--           different reference orbit)
    #-- CYCL:   Cycle of reference orbit for this phase
    #-- TRK: Track within reference orbit
    #-- SEG:   Segment of orbit
    #-- GRAN:  Granule version number
    #-- TYPE:  File type
    PRD, RL, RGTP, ORB, INST, CYCL, TRK, SEG, GRAN, TYPE = rx.findall(
        FILE).pop()

    #-- read GLAH12 HDF5 file
    fileID = h5py.File(FILE, 'r')
    n_40HZ, = fileID['Data_40HZ']['Time']['i_rec_ndx'].shape
    #-- get variables and attributes
    rec_ndx_40HZ = fileID['Data_40HZ']['Time']['i_rec_ndx'][:].copy()
    #-- seconds since 2000-01-01 12:00:00 UTC (J2000)
    DS_UTCTime_40HZ = fileID['Data_40HZ']['DS_UTCTime_40'][:].copy()
    #-- Latitude (degrees North)
    lat_TPX = fileID['Data_40HZ']['Geolocation']['d_lat'][:].copy()
    #-- Longitude (degrees East)
    lon_40HZ = fileID['Data_40HZ']['Geolocation']['d_lon'][:].copy()
    #-- Elevation (height above TOPEX/Poseidon ellipsoid in meters)
    elev_TPX = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'][:].copy()
    fv = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'].attrs[
        '_FillValue']

    #-- convert time from UTC time of day to Modified Julian Days (MJD)
    #-- J2000: seconds since 2000-01-01 12:00:00 UTC
    t = DS_UTCTime_40HZ[:] / 86400.0 + 51544.5
    #-- convert from MJD to calendar dates
    YY, MM, DD, HH, MN, SS = pyTMD.time.convert_julian(t + 2400000.5,
                                                       FORMAT='tuple')
    #-- convert calendar dates into year decimal
    tdec = pyTMD.time.convert_calendar_decimal(YY,
                                               MM,
                                               day=DD,
                                               hour=HH,
                                               minute=MN,
                                               second=SS)

    #-- semimajor axis (a) and flattening (f) for TP and WGS84 ellipsoids
    atop, ftop = (6378136.3, 1.0 / 298.257)
    awgs, fwgs = (6378137.0, 1.0 / 298.257223563)
    #-- convert from Topex/Poseidon to WGS84 Ellipsoids
    lat_40HZ, elev_40HZ = pyTMD.spatial.convert_ellipsoid(lat_TPX,
                                                          elev_TPX,
                                                          atop,
                                                          ftop,
                                                          awgs,
                                                          fwgs,
                                                          eps=1e-12,
                                                          itmax=10)

    #-- degrees to radians and arcseconds to radians
    dtr = np.pi / 180.0
    atr = np.pi / 648000.0
    #-- earth and physical parameters (IERS)
    G = 6.67428e-11  #-- universal constant of gravitation [m^3/(kg*s^2)]
    GM = 3.986004418e14  #-- geocentric gravitational constant [m^3/s^2]
    ge = 9.7803278  #-- mean equatorial gravity [m/s^2]
    a_axis = 6378136.6  #-- equatorial radius of the Earth [m]
    flat = 1.0 / 298.257223563  #-- flattening of the ellipsoid
    omega = 7.292115e-5  #-- mean rotation rate of the Earth [radians/s]
    rho_w = 1025.0  #-- density of sea water [kg/m^3]
    #-- Linear eccentricity and first numerical eccentricity
    lin_ecc = np.sqrt((2.0 * flat - flat**2) * a_axis**2)
    ecc1 = lin_ecc / a_axis
    #-- tidal love number differential (1 + kl - hl) for pole tide frequencies
    gamma = 0.6870 + 0.0036j

    #-- convert from geodetic latitude to geocentric latitude
    #-- geodetic latitude in radians
    latitude_geodetic_rad = lat_40HZ * dtr
    #-- prime vertical radius of curvature
    N = a_axis / np.sqrt(1.0 - ecc1**2.0 * np.sin(latitude_geodetic_rad)**2.0)
    #-- calculate X, Y and Z from geodetic latitude and longitude
    X = (N + elev_40HZ) * np.cos(latitude_geodetic_rad) * np.cos(
        lon_40HZ * dtr)
    Y = (N + elev_40HZ) * np.cos(latitude_geodetic_rad) * np.sin(
        lon_40HZ * dtr)
    Z = (N * (1.0 - ecc1**2.0) + elev_40HZ) * np.sin(latitude_geodetic_rad)
    rr = np.sqrt(X**2.0 + Y**2.0 + Z**2.0)
    #-- calculate geocentric latitude and convert to degrees
    latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0)) / dtr

    #-- pole tide displacement scale factor
    Hp = np.sqrt(8.0 * np.pi / 15.0) * (omega**2 * a_axis**4) / GM
    K = 4.0 * np.pi * G * rho_w * Hp * a_axis / (3.0 * ge)
    K1 = 4.0 * np.pi * G * rho_w * Hp * a_axis**3 / (3.0 * GM)

    #-- read ocean pole tide map from Desai (2002)
    ocean_pole_tide_file = get_data_path(['data', 'opoleloadcoefcmcor.txt.gz'])
    iur, iun, iue, ilon, ilat = read_ocean_pole_tide(ocean_pole_tide_file)

    #-- pole tide files (mean and daily)
    mean_pole_file = get_data_path(['data', 'mean-pole.tab'])
    pole_tide_file = get_data_path(['data', 'finals.all'])

    #-- read IERS daily polar motion values
    EOP = read_iers_EOP(pole_tide_file)
    #-- create cubic spline interpolations of daily polar motion values
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['x'], k=3, s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['y'], k=3, s=0)

    #-- interpolate ocean pole tide map from Desai (2002)
    if (METHOD == 'spline'):
        #-- use scipy bivariate splines to interpolate to output points
        f1 = scipy.interpolate.RectBivariateSpline(ilon,
                                                   ilat[::-1],
                                                   iur[:, ::-1].real,
                                                   kx=1,
                                                   ky=1)
        f2 = scipy.interpolate.RectBivariateSpline(ilon,
                                                   ilat[::-1],
                                                   iur[:, ::-1].imag,
                                                   kx=1,
                                                   ky=1)
        UR = np.zeros((n_40HZ), dtype=np.complex128)
        UR.real = f1.ev(lon_40HZ, latitude_geocentric)
        UR.imag = f2.ev(lon_40HZ, latitude_geocentric)
    else:
        #-- use scipy regular grid to interpolate values for a given method
        r1 = scipy.interpolate.RegularGridInterpolator((ilon, ilat[::-1]),
                                                       iur[:, ::-1],
                                                       method=METHOD)
        UR = r1(np.c_[lon_40HZ, latitude_geocentric])

    #-- calculate angular coordinates of mean pole at time tdec
    mpx, mpy, fl = iers_mean_pole(mean_pole_file, tdec, '2015')
    #-- interpolate daily polar motion values to t using cubic splines
    px = xSPL(t)
    py = ySPL(t)
    #-- calculate differentials from mean pole positions
    mx = px - mpx
    my = -(py - mpy)
    #-- calculate radial displacement at time
    Urad = np.ma.zeros((n_40HZ), fill_value=fv)
    Urad.data[:] = K * atr * np.real(
        (mx * gamma.real + my * gamma.imag) * UR.real +
        (my * gamma.real - mx * gamma.imag) * UR.imag)
    #-- replace fill values
    Urad.mask = np.isnan(Urad.data)
    Urad.data[Urad.mask] = Urad.fill_value

    #-- copy variables for outputting to HDF5 file
    IS_gla12_tide = dict(Data_40HZ={})
    IS_gla12_fill = dict(Data_40HZ={})
    IS_gla12_tide_attrs = dict(Data_40HZ={})

    #-- copy global file attributes
    global_attribute_list = [
        'featureType', 'title', 'comment', 'summary', 'license', 'references',
        'AccessConstraints', 'CitationforExternalPublication',
        'contributor_role', 'contributor_name', 'creator_name',
        'creator_email', 'publisher_name', 'publisher_email', 'publisher_url',
        'platform', 'instrument', 'processing_level', 'date_created',
        'spatial_coverage_type', 'history', 'keywords', 'keywords_vocabulary',
        'naming_authority', 'project', 'time_type', 'date_type',
        'time_coverage_start', 'time_coverage_end', 'time_coverage_duration',
        'source', 'HDFVersion', 'identifier_product_type',
        'identifier_product_format_version', 'Conventions', 'institution',
        'ReprocessingPlanned', 'ReprocessingActual', 'LocalGranuleID',
        'ProductionDateTime', 'LocalVersionID', 'PGEVersion', 'OrbitNumber',
        'StartOrbitNumber', 'StopOrbitNumber', 'EquatorCrossingLongitude',
        'EquatorCrossingTime', 'EquatorCrossingDate', 'ShortName', 'VersionID',
        'InputPointer', 'RangeBeginningTime', 'RangeEndingTime',
        'RangeBeginningDate', 'RangeEndingDate', 'PercentGroundHit',
        'OrbitQuality', 'Cycle', 'Track', 'Instrument_State', 'Timing_Bias',
        'ReferenceOrbit', 'SP_ICE_PATH_NO', 'SP_ICE_GLAS_StartBlock',
        'SP_ICE_GLAS_EndBlock', 'Instance', 'Range_Bias',
        'Instrument_State_Date', 'Instrument_State_Time', 'Range_Bias_Date',
        'Range_Bias_Time', 'Timing_Bias_Date', 'Timing_Bias_Time',
        'identifier_product_doi', 'identifier_file_uuid',
        'identifier_product_doi_authority'
    ]
    for att in global_attribute_list:
        IS_gla12_tide_attrs[att] = fileID.attrs[att]

    #-- add attributes for input GLA12 file
    IS_gla12_tide_attrs['input_files'] = os.path.basename(FILE)
    #-- update geospatial ranges for ellipsoid
    IS_gla12_tide_attrs['geospatial_lat_min'] = np.min(lat_40HZ)
    IS_gla12_tide_attrs['geospatial_lat_max'] = np.max(lat_40HZ)
    IS_gla12_tide_attrs['geospatial_lon_min'] = np.min(lon_40HZ)
    IS_gla12_tide_attrs['geospatial_lon_max'] = np.max(lon_40HZ)
    IS_gla12_tide_attrs['geospatial_lat_units'] = "degrees_north"
    IS_gla12_tide_attrs['geospatial_lon_units'] = "degrees_east"
    IS_gla12_tide_attrs['geospatial_ellipsoid'] = "WGS84"

    #-- copy 40Hz group attributes
    for att_name, att_val in fileID['Data_40HZ'].attrs.items():
        IS_gla12_tide_attrs['Data_40HZ'][att_name] = att_val
    #-- copy attributes for time, geolocation and geophysical groups
    for var in ['Time', 'Geolocation', 'Geophysical']:
        IS_gla12_tide['Data_40HZ'][var] = {}
        IS_gla12_fill['Data_40HZ'][var] = {}
        IS_gla12_tide_attrs['Data_40HZ'][var] = {}
        for att_name, att_val in fileID['Data_40HZ'][var].attrs.items():
            IS_gla12_tide_attrs['Data_40HZ'][var][att_name] = att_val

    #-- J2000 time
    IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] = DS_UTCTime_40HZ
    IS_gla12_fill['Data_40HZ']['DS_UTCTime_40'] = None
    IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'] = {}
    for att_name, att_val in fileID['Data_40HZ']['DS_UTCTime_40'].attrs.items(
    ):
        if att_name not in ('DIMENSION_LIST', 'CLASS', 'NAME'):
            IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'][
                att_name] = att_val
    #-- record
    IS_gla12_tide['Data_40HZ']['Time']['i_rec_ndx'] = rec_ndx_40HZ
    IS_gla12_fill['Data_40HZ']['Time']['i_rec_ndx'] = None
    IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'] = {}
    for att_name, att_val in fileID['Data_40HZ']['Time'][
            'i_rec_ndx'].attrs.items():
        if att_name not in ('DIMENSION_LIST', 'CLASS', 'NAME'):
            IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'][
                att_name] = att_val
    #-- latitude
    IS_gla12_tide['Data_40HZ']['Geolocation']['d_lat'] = lat_40HZ
    IS_gla12_fill['Data_40HZ']['Geolocation']['d_lat'] = None
    IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'] = {}
    for att_name, att_val in fileID['Data_40HZ']['Geolocation'][
            'd_lat'].attrs.items():
        if att_name not in ('DIMENSION_LIST', 'CLASS', 'NAME'):
            IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'][
                att_name] = att_val
    #-- longitude
    IS_gla12_tide['Data_40HZ']['Geolocation']['d_lon'] = lon_40HZ
    IS_gla12_fill['Data_40HZ']['Geolocation']['d_lon'] = None
    IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'] = {}
    for att_name, att_val in fileID['Data_40HZ']['Geolocation'][
            'd_lon'].attrs.items():
        if att_name not in ('DIMENSION_LIST', 'CLASS', 'NAME'):
            IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'][
                att_name] = att_val

    #-- geophysical variables
    #-- computed ocean pole tide
    IS_gla12_tide['Data_40HZ']['Geophysical']['d_opElv'] = Urad
    IS_gla12_fill['Data_40HZ']['Geophysical']['d_opElv'] = Urad.fill_value
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv'] = {}
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv'][
        'units'] = "meters"
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv']['long_name'] = \
        "Ocean Pole Tide"
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv'][
        'description'] = ("Ocean "
                          "pole tide radial displacements due to polar motion")
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv']['reference'] = \
        'ftp://tai.bipm.org/iers/conv2010/chapter7/opoleloadcoefcmcor.txt.gz'
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv']['coordinates'] = \
        "../DS_UTCTime_40"

    #-- close the input HDF5 file
    fileID.close()

    #-- output tidal HDF5 file
    args = (PRD, RL, RGTP, ORB, INST, CYCL, TRK, SEG, GRAN, TYPE)
    file_format = 'GLAH{0}_{1}_OPT_{2}{3}{4}_{5}_{6}_{7}_{8}_{9}.h5'
    #-- print file information
    print('\t{0}'.format(file_format.format(*args))) if VERBOSE else None
    HDF5_GLA12_tide_write(IS_gla12_tide,
                          IS_gla12_tide_attrs,
                          FILENAME=os.path.join(DIRECTORY,
                                                file_format.format(*args)),
                          FILL_VALUE=IS_gla12_fill,
                          CLOBBER=True)
    #-- change the permissions mode
    os.chmod(os.path.join(DIRECTORY, file_format.format(*args)), MODE)
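A minimal usage sketch (hypothetical granule name, constructed to match the GLAH12 regular expression at the top of the function):

import os
FILE = os.path.expanduser('~/GLAH12_634_2113_002_0071_4_01_0001.H5')
compute_OPT_ICESat(FILE, METHOD='spline', VERBOSE=True, MODE=0o775)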
Example #6
def compute_OPT_displacements(tide_dir,
                              input_file,
                              output_file,
                              METHOD=None,
                              VERBOSE=False,
                              MODE=0o775):

    #-- read input *.csv file to extract MJD, latitude, longitude and elevation
    dtype = dict(names=('MJD', 'lat', 'lon', 'h'),
                 formats=('f', 'f', 'f', 'f'))
    dinput = np.loadtxt(input_file, delimiter=',', dtype=dtype)
    file_lines, = np.shape(dinput['h'])
    #-- convert from MJD to calendar dates, then to year-decimal
    YY, MM, DD, HH, MN, SS = convert_julian(dinput['MJD'] + 2400000.5,
                                            FORMAT='tuple')
    tdec = convert_calendar_decimal(YY,
                                    MM,
                                    DAY=DD,
                                    HOUR=HH,
                                    MINUTE=MN,
                                    SECOND=SS)

    #-- degrees to radians and arcseconds to radians
    dtr = np.pi / 180.0
    atr = np.pi / 648000.0
    #-- earth and physical parameters (IERS and WGS84)
    G = 6.67428e-11  #-- universal constant of gravitation [m^3/(kg*s^2)]
    GM = 3.986004418e14  #-- geocentric gravitational constant [m^3/s^2]
    a_axis = 6378136.6  #-- WGS84 equatorial radius of the Earth [m]
    flat = 1.0 / 298.257223563  #-- flattening of the WGS84 ellipsoid
    omega = 7.292115e-5  #-- mean rotation rate of the Earth [radians/s]
    rho_w = 1025.0  #-- density of sea water [kg/m^3]
    #-- Linear eccentricity and first numerical eccentricity
    lin_ecc = np.sqrt((2.0 * flat - flat**2) * a_axis**2)
    ecc1 = lin_ecc / a_axis
    #-- tidal love number differential (1 + kl - hl) for pole tide frequencies
    gamma = 0.6870 + 0.0036j

    #-- convert from geodetic latitude to geocentric latitude
    #-- geodetic latitude in radians
    latitude_geodetic_rad = dinput['lat'] * dtr
    #-- prime vertical radius of curvature
    N = a_axis / np.sqrt(1.0 - ecc1**2.0 * np.sin(latitude_geodetic_rad)**2.0)
    #-- calculate X, Y and Z from geodetic latitude and longitude
    X = (N + dinput['h']) * np.cos(latitude_geodetic_rad) * np.cos(
        dinput['lon'] * dtr)
    Y = (N + dinput['h']) * np.cos(latitude_geodetic_rad) * np.sin(
        dinput['lon'] * dtr)
    Z = (N * (1.0 - ecc1**2.0) + dinput['h']) * np.sin(latitude_geodetic_rad)
    #-- calculate geocentric latitude and convert to degrees
    latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0)) / dtr

    #-- pole tide displacement scale factor
    Hp = np.sqrt(8.0 * np.pi / 15.0) * (omega**2 * a_axis**4) / GM
    K = 4.0 * np.pi * G * rho_w * Hp * a_axis**3 / (3.0 * GM)

    #-- pole tide files (mean and daily)
    mean_pole_file = os.path.join(tide_dir, 'mean_pole_2017-10-23.tab')
    pole_tide_file = os.path.join(tide_dir, 'finals_all_2017-09-01.tab')
    #-- calculate angular coordinates of mean pole at time tdec
    mpx, mpy, fl = iers_mean_pole(mean_pole_file, tdec, '2015')
    #-- read IERS daily polar motion values
    EOP = read_iers_EOP(pole_tide_file)
    #-- interpolate daily polar motion values to t1 using cubic splines
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['x'], k=3, s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['y'], k=3, s=0)
    px = xSPL(dinput['MJD'])
    py = ySPL(dinput['MJD'])
    #-- calculate differentials from mean pole positions
    mx = px - mpx
    my = -(py - mpy)

    #-- read ocean pole tide map from Desai (2002)
    ocean_pole_tide_file = os.path.join(tide_dir, 'opoleloadcoefcmcor.txt.gz')
    iur, ilon, ilat = read_ocean_pole_tide(ocean_pole_tide_file)
    #-- interpolate ocean pole tide map from Desai (2002)
    if (METHOD == 'spline'):
        #-- use scipy bivariate splines to interpolate to output points
        f1 = scipy.interpolate.RectBivariateSpline(ilon,
                                                   ilat[::-1],
                                                   iur[:, ::-1].real,
                                                   kx=1,
                                                   ky=1)
        f2 = scipy.interpolate.RectBivariateSpline(ilon,
                                                   ilat[::-1],
                                                   iur[:, ::-1].imag,
                                                   kx=1,
                                                   ky=1)
        UR = np.zeros((file_lines), dtype=np.complex128)
        UR.real = f1.ev(dinput['lon'], latitude_geocentric)
        UR.imag = f2.ev(dinput['lon'], latitude_geocentric)
    else:
        #-- create mesh grids of latitude and longitude
        gridlon, gridlat = np.meshgrid(ilon, ilat, indexing='ij')
        interp_points = np.column_stack((gridlon.flatten(), gridlat.flatten()))
        #-- use scipy griddata to interpolate to output points
        UR = scipy.interpolate.griddata(interp_points,
                                        iur.flatten(),
                                        np.c_[dinput['lon'], latitude_geocentric],
                                        method=METHOD)

    #-- calculate radial displacement at time
    Urad = K * atr * np.real((mx * gamma.real + my * gamma.imag) * UR.real +
                             (my * gamma.real - mx * gamma.imag) * UR.imag)

    #-- output to file
    with open(output_file, 'w') as f:
        for d, lt, ln, u in zip(dinput['MJD'], dinput['lat'], dinput['lon'],
                                Urad):
            print('{0:g},{1:g},{2:g},{3:f}'.format(d, lt, ln, u), file=f)
    #-- change the permissions level to MODE
    os.chmod(output_file, MODE)
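A minimal usage sketch (hypothetical paths): this older variant expects a plain comma-delimited input file with columns of MJD, latitude, longitude and height, and writes MJD, latitude, longitude and the ocean pole tide displacement to the output csv file.

import os
tide_dir = os.path.expanduser('~/tide_models')
compute_OPT_displacements(tide_dir, 'input_MJD_lat_lon_h.csv',
                          'output_ocean_pole_tide.csv',
                          METHOD='spline', VERBOSE=True, MODE=0o775)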
def compute_LPT_ICESat(FILE, VERBOSE=False, MODE=0o775):

    #-- get directory from FILE
    print('{0} -->'.format(os.path.basename(FILE))) if VERBOSE else None
    DIRECTORY = os.path.dirname(FILE)

    #-- compile regular expression operator for extracting information from file
    rx = re.compile((r'GLAH(\d{2})_(\d{3})_(\d{1})(\d{1})(\d{2})_(\d{3})_'
                     r'(\d{4})_(\d{1})_(\d{2})_(\d{4})\.H5'), re.VERBOSE)
    #-- extract parameters from ICESat/GLAS HDF5 file name
    #-- PRD:  Product number (01, 05, 06, 12, 13, 14, or 15)
    #-- RL:  Release number for process that created the product = 634
    #-- RGTP:  Repeat ground-track phase (1=8-day, 2=91-day, 3=transfer orbit)
    #-- ORB:   Reference orbit number (starts at 1 and increments each time a
    #--           new reference orbit ground track file is obtained.)
    #-- INST:  Instance number (increments every time the satellite enters a
    #--           different reference orbit)
    #-- CYCL:   Cycle of reference orbit for this phase
    #-- TRK: Track within reference orbit
    #-- SEG:   Segment of orbit
    #-- GRAN:  Granule version number
    #-- TYPE:  File type
    PRD, RL, RGTP, ORB, INST, CYCL, TRK, SEG, GRAN, TYPE = rx.findall(
        FILE).pop()

    #-- read GLAH12 HDF5 file
    fileID = h5py.File(FILE, 'r')
    n_40HZ, = fileID['Data_40HZ']['Time']['i_rec_ndx'].shape
    #-- get variables and attributes
    rec_ndx_40HZ = fileID['Data_40HZ']['Time']['i_rec_ndx'][:].copy()
    #-- seconds since 2000-01-01 12:00:00 UTC (J2000)
    DS_UTCTime_40HZ = fileID['Data_40HZ']['DS_UTCTime_40'][:].copy()
    #-- Latitude (degrees North)
    lat_TPX = fileID['Data_40HZ']['Geolocation']['d_lat'][:].copy()
    #-- Longitude (degrees East)
    lon_40HZ = fileID['Data_40HZ']['Geolocation']['d_lon'][:].copy()
    #-- Elevation (height above TOPEX/Poseidon ellipsoid in meters)
    elev_TPX = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'][:].copy()
    fv = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'].attrs[
        '_FillValue']

    #-- convert time from UTC J2000 seconds to Modified Julian Days (MJD)
    #-- J2000: seconds since 2000-01-01 12:00:00 UTC
    t = DS_UTCTime_40HZ[:] / 86400.0 + 51544.5
    #-- convert from MJD to calendar dates
    YY, MM, DD, HH, MN, SS = pyTMD.time.convert_julian(t + 2400000.5,
                                                       FORMAT='tuple')
    #-- convert calendar dates into year decimal
    tdec = pyTMD.time.convert_calendar_decimal(YY,
                                               MM,
                                               day=DD,
                                               hour=HH,
                                               minute=MN,
                                               second=SS)
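    #-- sanity check (illustrative): a J2000 time of 0 seconds corresponds to
    #-- MJD 51544.5, and pyTMD.time.convert_julian(2400000.5 + 51544.5)
    #-- returns the calendar date 2000-01-01 12:00:00 UTC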

    #-- semimajor axis (a) and flattening (f) for TP and WGS84 ellipsoids
    atop, ftop = (6378136.3, 1.0 / 298.257)
    awgs, fwgs = (6378137.0, 1.0 / 298.257223563)
    #-- convert from Topex/Poseidon to WGS84 Ellipsoids
    lat_40HZ, elev_40HZ = pyTMD.spatial.convert_ellipsoid(lat_TPX,
                                                          elev_TPX,
                                                          atop,
                                                          ftop,
                                                          awgs,
                                                          fwgs,
                                                          eps=1e-12,
                                                          itmax=10)

    #-- degrees to radians
    dtr = np.pi / 180.0
    atr = np.pi / 648000.0
    #-- earth and physical parameters (IERS and WGS84)
    G = 6.67428e-11  #-- universal constant of gravitation [m^3/(kg*s^2)]
    GM = 3.986004418e14  #-- geocentric gravitational constant [m^3/s^2]
    ge = 9.7803278  #-- mean equatorial gravity [m/s^2]
    a_axis = 6378136.6  #-- semimajor axis of the Earth [m] (IERS convention)
    flat = 1.0 / 298.257223563  #-- flattening of the WGS84 ellipsoid
    b_axis = (1.0 - flat) * a_axis  #-- derived semiminor axis [m]
    omega = 7.292115e-5  #-- mean rotation rate of the Earth [radians/s]
    #-- tidal love number appropriate for the load tide
    hb2 = 0.6207
    #-- Linear eccentricity, first and second numerical eccentricity
    lin_ecc = np.sqrt((2.0 * flat - flat**2) * a_axis**2)
    ecc1 = lin_ecc / a_axis
    ecc2 = lin_ecc / b_axis
    #-- m parameter [omega^2*a^2*b/(GM)]. p. 70, Eqn.(2-137)
    m = omega**2 * ((1 - flat) * a_axis**3) / GM
    #-- flattening components
    f_2 = -flat + (5.0/2.0)*m + (1.0/2.0)*flat**2.0 - (26.0/7.0)*flat*m + \
        (15.0/4.0)*m**2.0
    f_4 = -(1.0 / 2.0) * flat**2.0 + (5.0 / 2.0) * flat * m

    #-- convert from geodetic latitude to geocentric latitude
    #-- geodetic latitude in radians
    latitude_geodetic_rad = lat_40HZ * dtr
    #-- prime vertical radius of curvature
    N = a_axis / np.sqrt(1.0 - ecc1**2.0 * np.sin(latitude_geodetic_rad)**2.0)
    #-- calculate X, Y and Z from geodetic latitude and longitude
    X = (N + elev_40HZ) * np.cos(latitude_geodetic_rad) * np.cos(
        lon_40HZ * dtr)
    Y = (N + elev_40HZ) * np.cos(latitude_geodetic_rad) * np.sin(
        lon_40HZ * dtr)
    Z = (N * (1.0 - ecc1**2.0) + elev_40HZ) * np.sin(latitude_geodetic_rad)
    rr = np.sqrt(X**2.0 + Y**2.0 + Z**2.0)
    #-- calculate geocentric latitude and convert to degrees
    latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0)) / dtr
    #-- colatitude and longitude in radians
    theta = dtr * (90.0 - latitude_geocentric)
    phi = lon_40HZ * dtr
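    #-- note: at zero elevation this reduces to the closed-form relation
    #-- tan(lat_geocentric) = (1 - ecc1**2) * tan(lat_geodetic)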

    #-- compute normal gravity at spatial location and elevation of points.
    #-- normal gravity at the equator. p. 79, Eqn.(2-186)
    gamma_a = (GM / (a_axis * b_axis)) * (1.0 - (3.0 / 2.0) * m -
                                          (3.0 / 14.0) * ecc2**2.0 * m)
    #-- Normal gravity. p. 80, Eqn.(2-199)
    gamma_0 = gamma_a * (1.0 + f_2 * np.cos(theta)**2.0 + f_4 *
                         np.sin(np.pi * latitude_geocentric / 180.0)**4.0)
    #-- Normal gravity at height h. p. 82, Eqn.(2-215)
    gamma_h = gamma_0*(1.0 -
        (2.0/a_axis)*(1.0+flat+m-2.0*flat*np.cos(theta)**2.0)*elev_40HZ + \
        (3.0/a_axis**2.0)*elev_40HZ**2.0)

    #-- pole tide files (mean and daily)
    mean_pole_file = get_data_path(['data', 'mean-pole.tab'])
    pole_tide_file = get_data_path(['data', 'finals.all'])
    #-- read IERS daily polar motion values
    EOP = read_iers_EOP(pole_tide_file)
    #-- create cubic spline interpolations of daily polar motion values
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['x'], k=3, s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['y'], k=3, s=0)
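    #-- with s=0 the splines interpolate the tabulated values exactly, so
    #-- xSPL(EOP['MJD']) reproduces EOP['x'] (a useful sanity check)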

    #-- calculate angular coordinates of mean pole at time tdec
    mpx, mpy, fl = iers_mean_pole(mean_pole_file, tdec, '2015')
    #-- interpolate daily polar motion values to time using cubic splines
    px = xSPL(t)
    py = ySPL(t)
    #-- calculate differentials from mean pole positions
    mx = px - mpx
    my = -(py - mpy)
    #-- calculate radial displacement at time
    dfactor = -hb2 * atr * (omega**2 * rr**2) / (2.0 * gamma_h)
    Srad = np.ma.zeros((n_40HZ), fill_value=fv)
    Srad.data[:] = dfactor * np.sin(
        2.0 * theta) * (mx * np.cos(phi) + my * np.sin(phi))
    #-- replace fill values
    Srad.mask = np.isnan(Srad.data)
    Srad.data[Srad.mask] = Srad.fill_value
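    #-- note: this follows the solid Earth pole tide expression of the IERS
    #-- Conventions (2010), Chapter 7, with the wobble parameters (mx, my)
    #-- converted from arcseconds to radians through atr within dfactor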

    #-- copy variables for outputting to HDF5 file
    IS_gla12_tide = dict(Data_40HZ={})
    IS_gla12_fill = dict(Data_40HZ={})
    IS_gla12_tide_attrs = dict(Data_40HZ={})

    #-- copy global file attributes
    global_attribute_list = [
        'featureType', 'title', 'comment', 'summary', 'license', 'references',
        'AccessConstraints', 'CitationforExternalPublication',
        'contributor_role', 'contributor_name', 'creator_name',
        'creator_email', 'publisher_name', 'publisher_email', 'publisher_url',
        'platform', 'instrument', 'processing_level', 'date_created',
        'spatial_coverage_type', 'history', 'keywords', 'keywords_vocabulary',
        'naming_authority', 'project', 'time_type', 'date_type',
        'time_coverage_start', 'time_coverage_end', 'time_coverage_duration',
        'source', 'HDFVersion', 'identifier_product_type',
        'identifier_product_format_version', 'Conventions', 'institution',
        'ReprocessingPlanned', 'ReprocessingActual', 'LocalGranuleID',
        'ProductionDateTime', 'LocalVersionID', 'PGEVersion', 'OrbitNumber',
        'StartOrbitNumber', 'StopOrbitNumber', 'EquatorCrossingLongitude',
        'EquatorCrossingTime', 'EquatorCrossingDate', 'ShortName', 'VersionID',
        'InputPointer', 'RangeBeginningTime', 'RangeEndingTime',
        'RangeBeginningDate', 'RangeEndingDate', 'PercentGroundHit',
        'OrbitQuality', 'Cycle', 'Track', 'Instrument_State', 'Timing_Bias',
        'ReferenceOrbit', 'SP_ICE_PATH_NO', 'SP_ICE_GLAS_StartBlock',
        'SP_ICE_GLAS_EndBlock', 'Instance', 'Range_Bias',
        'Instrument_State_Date', 'Instrument_State_Time', 'Range_Bias_Date',
        'Range_Bias_Time', 'Timing_Bias_Date', 'Timing_Bias_Time',
        'identifier_product_doi', 'identifier_file_uuid',
        'identifier_product_doi_authority'
    ]
    for att in global_attribute_list:
        IS_gla12_tide_attrs[att] = fileID.attrs[att]

    #-- add attributes for input GLA12 file
    IS_gla12_tide_attrs['input_files'] = os.path.basename(FILE)
    #-- update geospatial ranges for ellipsoid
    IS_gla12_tide_attrs['geospatial_lat_min'] = np.min(lat_40HZ)
    IS_gla12_tide_attrs['geospatial_lat_max'] = np.max(lat_40HZ)
    IS_gla12_tide_attrs['geospatial_lon_min'] = np.min(lon_40HZ)
    IS_gla12_tide_attrs['geospatial_lon_max'] = np.max(lon_40HZ)
    IS_gla12_tide_attrs['geospatial_lat_units'] = "degrees_north"
    IS_gla12_tide_attrs['geospatial_lon_units'] = "degrees_east"
    IS_gla12_tide_attrs['geospatial_ellipsoid'] = "WGS84"

    #-- copy 40Hz group attributes
    for att_name, att_val in fileID['Data_40HZ'].attrs.items():
        IS_gla12_tide_attrs['Data_40HZ'][att_name] = att_val
    #-- copy attributes for time, geolocation and geophysical groups
    for var in ['Time', 'Geolocation', 'Geophysical']:
        IS_gla12_tide['Data_40HZ'][var] = {}
        IS_gla12_fill['Data_40HZ'][var] = {}
        IS_gla12_tide_attrs['Data_40HZ'][var] = {}
        for att_name, att_val in fileID['Data_40HZ'][var].attrs.items():
            IS_gla12_tide_attrs['Data_40HZ'][var][att_name] = att_val

    #-- J2000 time
    IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] = DS_UTCTime_40HZ
    IS_gla12_fill['Data_40HZ']['DS_UTCTime_40'] = None
    IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'] = {}
    for att_name, att_val in fileID['Data_40HZ']['DS_UTCTime_40'].attrs.items(
    ):
        if att_name not in ('DIMENSION_LIST', 'CLASS', 'NAME'):
            IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'][
                att_name] = att_val
    #-- record
    IS_gla12_tide['Data_40HZ']['Time']['i_rec_ndx'] = rec_ndx_40HZ
    IS_gla12_fill['Data_40HZ']['Time']['i_rec_ndx'] = None
    IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'] = {}
    for att_name, att_val in fileID['Data_40HZ']['Time'][
            'i_rec_ndx'].attrs.items():
        if att_name not in ('DIMENSION_LIST', 'CLASS', 'NAME'):
            IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'][
                att_name] = att_val
    #-- latitude
    IS_gla12_tide['Data_40HZ']['Geolocation']['d_lat'] = lat_40HZ
    IS_gla12_fill['Data_40HZ']['Geolocation']['d_lat'] = None
    IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'] = {}
    for att_name, att_val in fileID['Data_40HZ']['Geolocation'][
            'd_lat'].attrs.items():
        if att_name not in ('DIMENSION_LIST', 'CLASS', 'NAME'):
            IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'][
                att_name] = att_val
    #-- longitude
    IS_gla12_tide['Data_40HZ']['Geolocation']['d_lon'] = lon_40HZ
    IS_gla12_fill['Data_40HZ']['Geolocation']['d_lon'] = None
    IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'] = {}
    for att_name, att_val in fileID['Data_40HZ']['Geolocation'][
            'd_lon'].attrs.items():
        if att_name not in ('DIMENSION_LIST', 'CLASS', 'NAME'):
            IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'][
                att_name] = att_val

    #-- geophysical variables
    #-- computed Solid Earth load pole tide
    IS_gla12_tide['Data_40HZ']['Geophysical']['d_poElv'] = Srad
    IS_gla12_fill['Data_40HZ']['Geophysical']['d_poElv'] = Srad.fill_value
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv'] = {}
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv'][
        'units'] = "meters"
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv']['long_name'] = \
        "Solid Earth Pole Tide"
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv'][
        'description'] = (
            "Solid "
            "Earth pole tide radial displacements due to polar motion")
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv']['reference'] = \
        'ftp://tai.bipm.org/iers/conv2010/chapter7/tn36_c7.pdf'
    IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv']['coordinates'] = \
        "../DS_UTCTime_40"

    #-- close the input HDF5 file
    fileID.close()

    #-- output tidal HDF5 file
    args = (PRD, RL, RGTP, ORB, INST, CYCL, TRK, SEG, GRAN, TYPE)
    file_format = 'GLAH{0}_{1}_LPT_{2}{3}{4}_{5}_{6}_{7}_{8}_{9}.h5'
    #-- print file information
    print('\t{0}'.format(file_format.format(*args))) if VERBOSE else None
    HDF5_GLA12_tide_write(IS_gla12_tide,
                          IS_gla12_tide_attrs,
                          FILENAME=os.path.join(DIRECTORY,
                                                file_format.format(*args)),
                          FILL_VALUE=IS_gla12_fill,
                          CLOBBER=True)
    #-- change the permissions mode
    os.chmod(os.path.join(DIRECTORY, file_format.format(*args)), MODE)
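
#-- example invocation (hypothetical granule name matching the regex above):
#-- compute_LPT_ICESat('GLAH12_634_2113_002_0071_0_01_0001.H5', VERBOSE=True)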


def compute_LPT_icebridge_data(tide_dir, arg, VERBOSE=False, MODE=0o775):

    #-- extract file name and subsetter indices lists
    match_object = re.match(r'(.*?)(\[(.*?)\])?$', arg)
    input_file = os.path.expanduser(match_object.group(1))
    #-- subset input file to indices
    if match_object.group(2):
        #-- decompress ranges and add to list
        input_subsetter = []
        for i in re.findall(r'((\d+)-(\d+)|(\d+))', match_object.group(3)):
            input_subsetter.append(int(i[3])) if i[3] else \
                input_subsetter.extend(range(int(i[1]),int(i[2])+1))
    else:
        input_subsetter = None

    #-- output directory for input_file
    DIRECTORY = os.path.dirname(input_file)
    #-- calculate if input files are from ATM or LVIS (+GH)
    regex = {}
    regex[
        'ATM'] = r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$'
    regex['ATM1b'] = r'(BLATM1b|ILATM1b)_(\d+)_(\d+)(.*?).(qi|TXT|h5)$'
    regex['LVIS'] = r'(BLVIS2|BVLIS2|ILVIS2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$'
    regex['LVGH'] = r'(ILVGH2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$'
    for key, val in regex.items():
        if re.match(val, os.path.basename(input_file)):
            OIB = key

    #-- HDF5 file attributes
    attrib = {}
    #-- latitude
    attrib['lat'] = {}
    attrib['lat']['long_name'] = 'Latitude_of_measurement'
    attrib['lat']['description'] = ('Corresponding_to_the_measurement_'
                                    'position_at_the_acquisition_time')
    attrib['lat']['units'] = 'Degrees_North'
    #-- longitude
    attrib['lon'] = {}
    attrib['lon']['long_name'] = 'Longitude_of_measurement'
    attrib['lon']['description'] = ('Corresponding_to_the_measurement_'
                                    'position_at_the_acquisition_time')
    attrib['lon']['units'] = 'Degrees_East'
    #-- load pole tides
    attrib['tide_pole'] = {}
    attrib['tide_pole']['long_name'] = 'Solid_Earth_Pole_Tide'
    attrib['tide_pole']['description'] = (
        'Solid_Earth_pole_tide_radial_'
        'displacements_at_the_measurement_position_at_the_acquisition_'
        'time_due_to_polar_motion')
    attrib['tide_pole']['reference'] = ('ftp://tai.bipm.org/iers/conv2010/'
                                        'chapter7/opoleloadcoefcmcor.txt.gz')
    attrib['tide_pole']['units'] = 'meters'
    #-- Modified Julian Days
    attrib['MJD'] = {}
    attrib['MJD']['long_name'] = 'Time'
    attrib['MJD']['description'] = 'Modified Julian Days'
    attrib['MJD']['units'] = 'Days'

    #-- extract information from first input file
    #-- acquisition year, month and day
    #-- number of points
    #-- instrument (PRE-OIB ATM or LVIS, OIB ATM or LVIS)
    if OIB in ('ATM', 'ATM1b'):
        M1, YYMMDD1, HHMMSS1, AX1, SF1 = re.findall(regex[OIB],
                                                    input_file).pop()
        #-- early date strings omitted the century (e.g. 93 for 1993)
        if (len(YYMMDD1) == 6):
            ypre, MM1, DD1 = YYMMDD1[:2], YYMMDD1[2:4], YYMMDD1[4:]
            if (float(ypre) >= 90):
                YY1 = '{0:4.0f}'.format(float(ypre) + 1900.0)
            else:
                YY1 = '{0:4.0f}'.format(float(ypre) + 2000.0)
        elif (len(YYMMDD1) == 8):
            YY1, MM1, DD1 = YYMMDD1[:4], YYMMDD1[4:6], YYMMDD1[6:]
    elif OIB in ('LVIS', 'LVGH'):
        M1, RG1, YY1, MMDD1, RLD1, SS1 = re.findall(regex[OIB],
                                                    input_file).pop()
        MM1, DD1 = MMDD1[:2], MMDD1[2:]

    #-- read data from input_file
    print('{0} -->'.format(input_file)) if VERBOSE else None
    if (OIB == 'ATM'):
        #-- load IceBridge ATM data from input_file
        dinput, file_lines, HEM = read_ATM_icessn_file(input_file,
                                                       input_subsetter)
    elif (OIB == 'ATM1b'):
        #-- load IceBridge Level-1b ATM data from input_file
        dinput, file_lines, HEM = read_ATM_qfit_file(input_file,
                                                     input_subsetter)
    elif OIB in ('LVIS', 'LVGH'):
        #-- load IceBridge LVIS data from input_file
        dinput, file_lines, HEM = read_LVIS_HDF5_file(input_file,
                                                      input_subsetter)

    #-- extract lat/lon
    lon = dinput['lon'][:]
    lat = dinput['lat'][:]
    #-- convert time from UTC J2000 seconds to Modified Julian Days (MJD)
    #-- J2000: seconds since 2000-01-01 12:00:00 UTC
    t = dinput['time'][:] / 86400.0 + 51544.5
    #-- convert from MJD to calendar dates
    YY, MM, DD, HH, MN, SS = convert_julian(t + 2400000.5, FORMAT='tuple')
    #-- convert calendar dates into year decimal
    tdec = convert_calendar_decimal(YY,
                                    MM,
                                    DAY=DD,
                                    HOUR=HH,
                                    MINUTE=MN,
                                    SECOND=SS)
    #-- elevation
    h1 = dinput['data'][:]

    #-- degrees to radians
    dtr = np.pi / 180.0
    atr = np.pi / 648000.0
    #-- earth and physical parameters (IERS and WGS84)
    G = 6.67428e-11  #-- universal constant of gravitation [m^3/(kg*s^2)]
    GM = 3.986004418e14  #-- geocentric gravitational constant [m^3/s^2]
    ge = 9.7803278  #-- mean equatorial gravity [m/s^2]
    a_axis = 6378136.6  #-- semimajor axis of the Earth [m] (IERS convention)
    flat = 1.0 / 298.257223563  #-- flattening of the WGS84 ellipsoid
    b_axis = (1.0 - flat) * a_axis  #-- derived semiminor axis [m]
    omega = 7.292115e-5  #-- mean rotation rate of the Earth [radians/s]
    #-- tidal love number appropriate for the load tide
    hb2 = 0.6207
    #-- Linear eccentricity, first and second numerical eccentricity
    lin_ecc = np.sqrt((2.0 * flat - flat**2) * a_axis**2)
    ecc1 = lin_ecc / a_axis
    ecc2 = lin_ecc / b_axis
    #-- m parameter [omega^2*a^2*b/(GM)]. p. 70, Eqn.(2-137)
    m = omega**2 * ((1 - flat) * a_axis**3) / GM
    #-- flattening components
    f_2 = -flat + (5.0/2.0)*m + (1.0/2.0)*flat**2.0 - (26.0/7.0)*flat*m + \
        (15.0/4.0)*m**2.0
    f_4 = -(1.0 / 2.0) * flat**2.0 + (5.0 / 2.0) * flat * m

    #-- convert from geodetic latitude to geocentric latitude
    #-- geodetic latitude in radians
    latitude_geodetic_rad = lat * dtr
    #-- prime vertical radius of curvature
    N = a_axis / np.sqrt(1.0 - ecc1**2.0 * np.sin(latitude_geodetic_rad)**2.0)
    #-- calculate X, Y and Z from geodetic latitude and longitude
    X = (N + h1) * np.cos(latitude_geodetic_rad) * np.cos(lon * dtr)
    Y = (N + h1) * np.cos(latitude_geodetic_rad) * np.sin(lon * dtr)
    Z = (N * (1.0 - ecc1**2.0) + h1) * np.sin(latitude_geodetic_rad)
    rr = np.sqrt(X**2.0 + Y**2.0 + Z**2.0)
    #-- calculate geocentric latitude and convert to degrees
    latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0)) / dtr
    #-- colatitude and longitude in radians
    theta = dtr * (90.0 - latitude_geocentric)
    phi = lon * dtr

    #-- compute normal gravity at spatial location and elevation of points.
    #-- normal gravity at the equator. p. 79, Eqn.(2-186)
    gamma_a = (GM / (a_axis * b_axis)) * (1.0 - (3.0 / 2.0) * m -
                                          (3.0 / 14.0) * ecc2**2.0 * m)
    #-- Normal gravity. p. 80, Eqn.(2-199)
    gamma_0 = gamma_a * (1.0 + f_2 * np.cos(theta)**2.0 + f_4 *
                         np.sin(np.pi * latitude_geocentric / 180.0)**4.0)
    #-- Normal gravity at height h. p. 82, Eqn.(2-215)
    gamma_h = gamma_0 * (
        1.0 - (2.0 / a_axis) *
        (1.0 + flat + m - 2.0 * flat * np.cos(theta)**2.0) * h1 +
        (3.0 / a_axis**2.0) * h1**2.0)

    #-- pole tide files (mean and daily)
    # mean_pole_file = os.path.join(tide_dir,'mean-pole.tab')
    mean_pole_file = os.path.join(tide_dir, 'mean_pole_2017-10-23.tab')
    pole_tide_file = os.path.join(tide_dir, 'finals_all_2017-09-01.tab')
    #-- read IERS daily polar motion values
    EOP = read_iers_EOP(pole_tide_file)
    #-- create cubic spline interpolations of daily polar motion values
    xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['x'], k=3, s=0)
    ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['y'], k=3, s=0)
    #-- bad value
    fill_value = -9999.0

    #-- output load pole tide HDF5 file
    #-- form: rg_NASA_LOAD_POLE_TIDE_WGS84_fl1yyyymmddjjjjj.H5
    #-- where rg is the hemisphere flag (GR or AN) for the region
    #-- fl1 is the data flag (ATM, LVIS)
    #-- yyyymmddjjjjj is the year, month, day and starting second of the file
    #-- output region flags: GR for Greenland and AN for Antarctica
    hem_flag = {'N': 'GR', 'S': 'AN'}
    #-- use starting second to distinguish between files for the day
    JJ1 = np.min(dinput['time']) % 86400
    #-- output file format
    args = (hem_flag[HEM], 'LOAD_POLE_TIDE', OIB, YY1, MM1, DD1, JJ1)
    FILENAME = '{0}_NASA_{1}_WGS84_{2}{3}{4}{5}{6:05.0f}.H5'.format(*args)
    #-- print file information
    print('\t{0}'.format(FILENAME)) if VERBOSE else None
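    #-- e.g. (illustrative) an ATM flight over Greenland acquired 2018-11-07
    #-- with starting second 51300 would be written as
    #-- GR_NASA_LOAD_POLE_TIDE_WGS84_ATM2018110751300.H5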

    #-- open output HDF5 file
    fid = h5py.File(os.path.join(DIRECTORY, FILENAME), 'w')

    #-- calculate angular coordinates of mean pole at time tdec
    mpx, mpy, fl = iers_mean_pole(mean_pole_file, tdec, '2015')
    #-- interpolate daily polar motion values to time using cubic splines
    px = xSPL(t)
    py = ySPL(t)
    #-- calculate differentials from mean pole positions
    mx = px - mpx
    my = -(py - mpy)
    #-- calculate radial displacement at time
    dfactor = -hb2 * atr * (omega**2 * rr**2) / (2.0 * gamma_h)
    Sr = dfactor * np.sin(2.0 * theta) * (mx * np.cos(phi) + my * np.sin(phi))

    #-- add latitude and longitude to output file
    for key in ['lat', 'lon']:
        #-- Defining the HDF5 dataset variables for lat/lon
        h5 = fid.create_dataset(key, (file_lines, ),
                                data=dinput[key][:],
                                dtype=dinput[key].dtype,
                                compression='gzip')
        #-- add HDF5 variable attributes
        for att_name, att_val in attrib[key].items():
            h5.attrs[att_name] = att_val
        #-- attach dimensions
        h5.dims[0].label = 'RECORD_SIZE'

    #-- output tides to HDF5 dataset
    h5 = fid.create_dataset('tide_pole', (file_lines, ),
                            data=Sr,
                            dtype=Sr.dtype,
                            compression='gzip')
    #-- add HDF5 variable attributes
    h5.attrs['_FillValue'] = fill_value
    for att_name, att_val in attrib['tide_pole'].items():
        h5.attrs[att_name] = att_val
    #-- attach dimensions
    h5.dims[0].label = 'RECORD_SIZE'

    #-- output days to HDF5 dataset
    h5 = fid.create_dataset('MJD', (file_lines, ),
                            data=t,
                            dtype=t.dtype,
                            compression='gzip')
    #-- add HDF5 variable attributes
    for att_name, att_val in attrib['MJD'].items():
        h5.attrs[att_name] = att_val
    #-- attach dimensions
    h5.dims[0].label = 'RECORD_SIZE'

    #-- HDF5 file attributes
    fid.attrs['featureType'] = 'trajectory'
    fid.attrs['title'] = 'Load_Pole_Tide_correction_for_elevation_measurements'
    fid.attrs['summary'] = ('Solid_Earth_pole_tide_radial_displacements_'
                            'computed_at_elevation_measurements.')
    fid.attrs['project'] = 'NASA_Operation_IceBridge'
    fid.attrs['processing_level'] = '4'
    fid.attrs['date_created'] = time.strftime('%Y-%m-%d', time.localtime())
    #-- add attributes for input files
    fid.attrs['elevation_file'] = os.path.basename(input_file)
    #-- add geospatial and temporal attributes
    fid.attrs['geospatial_lat_min'] = dinput['lat'].min()
    fid.attrs['geospatial_lat_max'] = dinput['lat'].max()
    fid.attrs['geospatial_lon_min'] = dinput['lon'].min()
    fid.attrs['geospatial_lon_max'] = dinput['lon'].max()
    fid.attrs['geospatial_lat_units'] = "degrees_north"
    fid.attrs['geospatial_lon_units'] = "degrees_east"
    fid.attrs['geospatial_ellipsoid'] = "WGS84"
    fid.attrs['time_type'] = 'UTC'

    #-- convert start/end time from MJD into Julian days
    JD_start = np.min(t) + 2400000.5
    JD_end = np.max(t) + 2400000.5
    #-- convert to calendar date with convert_julian.py
    cal = convert_julian(np.array([JD_start, JD_end]), ASTYPE=int)
    #-- add attributes with measurement date start, end and duration
    args = (cal['hour'][0], cal['minute'][0], cal['second'][0])
    fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args)
    args = (cal['hour'][-1], cal['minute'][-1], cal['second'][-1])
    fid.attrs['RangeEndingTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args)
    args = (cal['year'][0], cal['month'][0], cal['day'][0])
    fid.attrs['RangeBeginningDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args)
    args = (cal['year'][-1], cal['month'][-1], cal['day'][-1])
    fid.attrs['RangeEndingDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args)
    duration = np.round(JD_end * 86400.0 - JD_start * 86400.0)
    fid.attrs['DurationTimeSeconds'] = '{0:0.0f}'.format(duration)
    #-- close the output HDF5 dataset
    fid.close()
    #-- change the permissions level to MODE
    os.chmod(os.path.join(DIRECTORY, FILENAME), MODE)
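
#-- example invocation (illustrative file name matching regex['ATM']):
#-- compute_LPT_icebridge_data('/path/to/tide_dir',
#--     '/path/to/ILATM2_20181107_142500_smooth_nadir3seg_50pt.csv',
#--     VERBOSE=True)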