Example #1
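
These examples are excerpted from a larger module, so the imports are not shown. A minimal sketch of what they assume (the unprefixed helpers are assumptions about the surrounding script, not confirmed import paths):

import os
import re
import h5py
import datetime
import numpy as np
import icesat2_toolkit.time
#-- convert_julian, convert_calendar_decimal and count_leap_seconds are used
#-- below without a module prefix; they are assumed to come from companion
#-- scripts (e.g. convert_julian.py) or from icesat2_toolkit.time, depending
#-- on the toolkit version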
def test_julian(YEAR, MONTH):
    #-- days per month in a leap and a standard year
    #-- only difference is February (29 vs. 28)
    dpm_leap = np.array([31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
    dpm_stnd = np.array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
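    #-- note: the mod-4 test below ignores the century leap-year rule,
    #-- which is fine for test years between 1901 and 2099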
    DPM = dpm_stnd if np.mod(YEAR, 4) else dpm_leap
    #-- calculate Modified Julian Day (MJD) from calendar date
    DAY = np.random.randint(1, DPM[MONTH - 1] + 1)
    HOUR = np.random.randint(0, 23 + 1)
    MINUTE = np.random.randint(0, 59 + 1)
    SECOND = 60.0 * np.random.random_sample(1)
    MJD = icesat2_toolkit.time.convert_calendar_dates(YEAR,
                                                      MONTH,
                                                      DAY,
                                                      hour=HOUR,
                                                      minute=MINUTE,
                                                      second=SECOND,
                                                      epoch=(1858, 11, 17, 0,
                                                             0, 0))
    #-- convert MJD to calendar date
    YY, MM, DD, HH, MN, SS = convert_julian(MJD + 2400000.5, FORMAT='tuple')
    #-- assert dates
    eps = np.finfo(np.float16).eps
    assert (YY == YEAR)
    assert (MM == MONTH)
    assert (DD == DAY)
    assert (HH == HOUR)
    assert (MN == MINUTE)
    assert (np.abs(SS - SECOND) < eps)
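
The test above expects YEAR and MONTH to be supplied by the test runner. A hypothetical pytest parametrization (the decorator values here are illustrative and not the toolkit's own test configuration):

import pytest

@pytest.mark.parametrize("YEAR", [2000, 2018, 2020])
@pytest.mark.parametrize("MONTH", list(range(1, 13)))
def test_julian_parametrized(YEAR, MONTH):
    test_julian(YEAR, MONTH)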
Example #2
def convert_delta_time(delta_time, gps_epoch=1198800018.0):
    """
    converts ICESat-2 delta_times into Julian and year-decimal dates

    Arguments
    ---------
    delta_time: seconds since the epoch set by gps_epoch

    Keyword arguments
    -----------------
    gps_epoch: GPS seconds between the GPS epoch (1980-01-06T00:00:00)
        and the delta_time epoch; the default corresponds to the
        ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00 UTC)

    Returns
    -------
    julian: time in Julian days
    decimal: time in year-decimal
    """
    #-- convert to array if single value
    if (np.ndim(delta_time) == 0):
        delta_time = np.array([delta_time])
    #-- calculate gps time from delta_time
    gps_seconds = gps_epoch + delta_time
    time_leaps = count_leap_seconds(gps_seconds)
    #-- calculate Julian time (UTC)
    time_julian = 2444244.5 + (gps_seconds - time_leaps)/86400.0
    #-- convert to calendar date with convert_julian.py
    Y,M,D,h,m,s = convert_julian(time_julian,FORMAT='tuple')
    #-- calculate year-decimal time (UTC)
    time_decimal = convert_calendar_decimal(Y,M,DAY=D,HOUR=h,MINUTE=m,SECOND=s)
    #-- return both the Julian and year-decimal formatted dates
    return dict(julian=np.squeeze(time_julian),decimal=np.squeeze(time_decimal))
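
The hard-coded 2444244.5 above is the Julian Day of the GPS epoch (1980-01-06T00:00:00 UTC). A quick standard-library check of that constant, using the fact that JD = MJD + 2400000.5 and that the MJD epoch is 1858-11-17T00:00:

import datetime
mjd_epoch = datetime.datetime(1858, 11, 17)
gps_epoch = datetime.datetime(1980, 1, 6)
print((gps_epoch - mjd_epoch).days + 2400000.5)  #-- 2444244.5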
Example #3
def convert_delta_time(delta_time, gps_epoch=1198800018.0):
    """
    converts ICESat-2 delta_times into Julian and year-decimal dates

    Arguments
    ---------
    delta_time: seconds since the epoch set by gps_epoch

    Keyword arguments
    -----------------
    gps_epoch: GPS seconds between the GPS epoch (1980-01-06T00:00:00)
        and the delta_time epoch; the default corresponds to the
        ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00 UTC)

    Returns
    -------
    julian: time in Julian days
    decimal: time in year-decimal
    """
    #-- convert to array if single value
    delta_time = np.atleast_1d(delta_time)
    #-- calculate gps time from delta_time
    gps_seconds = gps_epoch + delta_time
    time_leaps = icesat2_toolkit.time.count_leap_seconds(gps_seconds)
    #-- calculate Julian time (UTC) by converting to MJD and then adding offset
    time_julian = 2400000.5 + icesat2_toolkit.time.convert_delta_time(
        gps_seconds - time_leaps,
        epoch1=(1980, 1, 6, 0, 0, 0),
        epoch2=(1858, 11, 17, 0, 0, 0),
        scale=1.0 / 86400.0)
    #-- convert to calendar date with convert_julian.py
    Y, M, D, h, m, s = convert_julian(time_julian, FORMAT='tuple')
    #-- calculate year-decimal time (UTC)
    time_decimal = convert_calendar_decimal(Y,
                                            M,
                                            DAY=D,
                                            HOUR=h,
                                            MINUTE=m,
                                            SECOND=s)
    #-- return both the Julian and year-decimal formatted dates
    return dict(julian=np.squeeze(time_julian),
                decimal=np.squeeze(time_decimal))
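
A hedged usage sketch for the function above: with the default gps_epoch (the ATLAS SDP epoch, 2018-01-01T00:00:00 UTC), a delta_time of zero should map to Julian Day 2458119.5 and decimal year 2018.0, assuming the toolkit routines behave as the docstring describes:

t = convert_delta_time(np.array([0.0, 86400.0]))
print(t['julian'])   #-- [2458119.5, 2458120.5]
print(t['decimal'])  #-- [2018.0, ~2018.0027]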
Example #4
def HDF5_ATL06_mask_write(IS2_atl06_mask,
                          IS2_atl06_attrs,
                          INPUT=None,
                          FILENAME='',
                          FILL_VALUE=None,
                          CLOBBER=True):
    #-- setting HDF5 clobber attribute
    if CLOBBER:
        clobber = 'w'
    else:
        clobber = 'w-'

    #-- open output HDF5 file
    fileID = h5py.File(os.path.expanduser(FILENAME), clobber)

    #-- create HDF5 records
    h5 = {}

    #-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC)
    #-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    h5['ancillary_data'] = {}
    for k, v in IS2_atl06_mask['ancillary_data'].items():
        #-- Defining the HDF5 dataset variables
        val = 'ancillary_data/{0}'.format(k)
        h5['ancillary_data'][k] = fileID.create_dataset(val,
                                                        np.shape(v),
                                                        data=v,
                                                        dtype=v.dtype,
                                                        compression='gzip')
        #-- add HDF5 variable attributes
        for att_name, att_val in IS2_atl06_attrs['ancillary_data'][k].items():
            h5['ancillary_data'][k].attrs[att_name] = att_val

    #-- write each output beam
    beams = [
        k for k in IS2_atl06_mask.keys() if bool(re.match(r'gt\d[lr]', k))
    ]
    for gtx in beams:
        fileID.create_group(gtx)
        #-- add HDF5 group attributes for beam
        for att_name in [
                'Description', 'atlas_pce', 'atlas_beam_type',
                'groundtrack_id', 'atmosphere_profile', 'atlas_spot_number',
                'sc_orientation'
        ]:
            fileID[gtx].attrs[att_name] = IS2_atl06_attrs[gtx][att_name]
        #-- create land_ice_segments group
        fileID[gtx].create_group('land_ice_segments')
        h5[gtx] = dict(land_ice_segments={})
        for att_name in ['Description', 'data_rate']:
            att_val = IS2_atl06_attrs[gtx]['land_ice_segments'][att_name]
            fileID[gtx]['land_ice_segments'].attrs[att_name] = att_val

        #-- segment_id
        v = IS2_atl06_mask[gtx]['land_ice_segments']['segment_id']
        attrs = IS2_atl06_attrs[gtx]['land_ice_segments']['segment_id']
        #-- Defining the HDF5 dataset variables
        val = '{0}/{1}/{2}'.format(gtx, 'land_ice_segments', 'segment_id')
        h5[gtx]['land_ice_segments']['segment_id'] = fileID.create_dataset(
            val, np.shape(v), data=v, dtype=v.dtype, compression='gzip')
        #-- add HDF5 variable attributes
        for att_name, att_val in attrs.items():
            h5[gtx]['land_ice_segments']['segment_id'].attrs[
                att_name] = att_val

        #-- geolocation, time and height variables
        for k in ['latitude', 'longitude', 'delta_time']:
            #-- values and attributes
            v = IS2_atl06_mask[gtx]['land_ice_segments'][k]
            attrs = IS2_atl06_attrs[gtx]['land_ice_segments'][k]
            fillvalue = FILL_VALUE[gtx]['land_ice_segments'][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}'.format(gtx, 'land_ice_segments', k)
            h5[gtx]['land_ice_segments'][k] = fileID.create_dataset(
                val,
                np.shape(v),
                data=v,
                dtype=v.dtype,
                fillvalue=fillvalue,
                compression='gzip')
            #-- attach dimensions
            for dim in ['segment_id']:
                h5[gtx]['land_ice_segments'][k].dims.create_scale(
                    h5[gtx]['land_ice_segments'][dim], dim)
                h5[gtx]['land_ice_segments'][k].dims[0].attach_scale(
                    h5[gtx]['land_ice_segments'][dim])
            #-- add HDF5 variable attributes
            for att_name, att_val in attrs.items():
                h5[gtx]['land_ice_segments'][k].attrs[att_name] = att_val

        #-- add to subsetting variables
        key = 'subsetting'
        fileID[gtx]['land_ice_segments'].create_group(key)
        h5[gtx]['land_ice_segments'][key] = {}
        for att_name in ['Description', 'data_rate']:
            att_val = IS2_atl06_attrs[gtx]['land_ice_segments'][key][att_name]
            fileID[gtx]['land_ice_segments'][key].attrs[att_name] = att_val
        for k, v in IS2_atl06_mask[gtx]['land_ice_segments'][key].items():
            #-- attributes
            attrs = IS2_atl06_attrs[gtx]['land_ice_segments'][key][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}/{3}'.format(gtx, 'land_ice_segments', key, k)
            h5[gtx]['land_ice_segments'][key][k] = \
                fileID.create_dataset(val, np.shape(v), data=v,
                dtype=v.dtype, compression='gzip')
            #-- attach dimensions
            for dim in ['segment_id']:
                h5[gtx]['land_ice_segments'][key][k].dims.create_scale(
                    h5[gtx]['land_ice_segments'][dim], dim)
                h5[gtx]['land_ice_segments'][key][k].dims[0].attach_scale(
                    h5[gtx]['land_ice_segments'][dim])
            #-- add HDF5 variable attributes
            for att_name, att_val in attrs.items():
                h5[gtx]['land_ice_segments'][key][k].attrs[att_name] = att_val

    #-- HDF5 file title
    fileID.attrs['featureType'] = 'trajectory'
    fileID.attrs['title'] = 'ATLAS/ICESat-2 Land Ice Height'
    fileID.attrs['summary'] = (
        'Subsetting masks for ice-sheet segments '
        'needed to interpret and assess the quality of land height estimates.')
    fileID.attrs['description'] = (
        'Land ice parameters for each beam.  All '
        'parameters are calculated for the same along-track increments for '
        'each beam and repeat.')
    date_created = datetime.datetime.today()
    fileID.attrs['date_created'] = date_created.isoformat()
    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['project'] = project
    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['platform'] = platform
    #-- add attribute for elevation instrument and designated processing level
    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
    fileID.attrs['instrument'] = instrument
    fileID.attrs['source'] = 'Spacecraft'
    fileID.attrs['references'] = 'http://nsidc.org/data/icesat2/data.html'
    fileID.attrs['processing_level'] = '4'
    #-- add attributes for input ATL06 files
    fileID.attrs['input_files'] = ','.join(
        [os.path.basename(i) for i in INPUT])
    #-- find geospatial and temporal ranges
    lnmn, lnmx, ltmn, ltmx, tmn, tmx = (np.inf, -np.inf, np.inf, -np.inf,
                                        np.inf, -np.inf)
    for gtx in beams:
        lon = IS2_atl06_mask[gtx]['land_ice_segments']['longitude']
        lat = IS2_atl06_mask[gtx]['land_ice_segments']['latitude']
        delta_time = IS2_atl06_mask[gtx]['land_ice_segments']['delta_time']
        #-- setting the geospatial and temporal ranges
        lnmn = lon.min() if (lon.min() < lnmn) else lnmn
        lnmx = lon.max() if (lon.max() > lnmx) else lnmx
        ltmn = lat.min() if (lat.min() < ltmn) else ltmn
        ltmx = lat.max() if (lat.max() > ltmx) else ltmx
        tmn = delta_time.min() if (delta_time.min() < tmn) else tmn
        tmx = delta_time.max() if (delta_time.max() > tmx) else tmx
    #-- add geospatial and temporal attributes
    fileID.attrs['geospatial_lat_min'] = ltmn
    fileID.attrs['geospatial_lat_max'] = ltmx
    fileID.attrs['geospatial_lon_min'] = lnmn
    fileID.attrs['geospatial_lon_max'] = lnmx
    fileID.attrs['geospatial_lat_units'] = "degrees_north"
    fileID.attrs['geospatial_lon_units'] = "degrees_east"
    fileID.attrs['geospatial_ellipsoid'] = "WGS84"
    fileID.attrs['date_type'] = 'UTC'
    fileID.attrs['time_type'] = 'CCSDS UTC-A'
    #-- convert start and end time from ATLAS SDP seconds into UTC time
    time_utc = convert_delta_time(np.array([tmn, tmx]))
    #-- convert to calendar date with convert_julian.py
    YY, MM, DD, HH, MN, SS = convert_julian(time_utc['julian'], FORMAT='tuple')
    #-- add attributes with measurement date start, end and duration
    tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]),
                            int(HH[0]), int(MN[0]), int(SS[0]),
                            int(1e6 * (SS[0] % 1)))
    fileID.attrs['time_coverage_start'] = tcs.isoformat()
    tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]),
                            int(HH[1]), int(MN[1]), int(SS[1]),
                            int(1e6 * (SS[1] % 1)))
    fileID.attrs['time_coverage_end'] = tce.isoformat()
    fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx - tmn)
    #-- Closing the HDF5 file
    fileID.close()
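
This writer (and the ATL03 writer below) attaches dimension scales with Dataset.dims.create_scale, which was deprecated in h5py 2.10 and removed in h5py 3.x. A minimal, self-contained sketch of the equivalent pattern on newer h5py; the file and dataset names here are illustrative:

import h5py
import numpy as np

with h5py.File('scales_demo.h5', 'w') as f:
    seg = f.create_dataset('segment_id', data=np.arange(10), compression='gzip')
    lat = f.create_dataset('latitude', data=np.linspace(-70.0, -69.0, 10),
                           compression='gzip')
    #-- make_scale + attach_scale replaces dims.create_scale + attach_scale
    seg.make_scale('segment_id')
    lat.dims[0].attach_scale(seg)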
Example #5
def HDF5_ATL03_dem_write(IS2_atl03_dem, IS2_atl03_attrs, INPUT=None,
    FILENAME='', FILL_VALUE=None, CLOBBER=True):
    #-- setting HDF5 clobber attribute
    if CLOBBER:
        clobber = 'w'
    else:
        clobber = 'w-'

    #-- open output HDF5 file
    fileID = h5py.File(os.path.expanduser(FILENAME), clobber)

    #-- create HDF5 records
    h5 = {}

    #-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC)
    #-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    h5['ancillary_data'] = {}
    for k,v in IS2_atl03_dem['ancillary_data'].items():
        #-- Defining the HDF5 dataset variables
        val = 'ancillary_data/{0}'.format(k)
        h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v,
            dtype=v.dtype, compression='gzip')
        #-- add HDF5 variable attributes
        for att_name,att_val in IS2_atl03_attrs['ancillary_data'][k].items():
            h5['ancillary_data'][k].attrs[att_name] = att_val

    #-- write each output beam
    beams = [k for k in IS2_atl03_dem.keys() if bool(re.match(r'gt\d[lr]',k))]
    for gtx in beams:
        fileID.create_group(gtx)
        #-- add HDF5 group attributes for beam
        for att_name in ['Description','atlas_pce','atlas_beam_type',
            'groundtrack_id','atmosphere_profile','atlas_spot_number',
            'sc_orientation']:
            fileID[gtx].attrs[att_name] = IS2_atl03_attrs[gtx][att_name]
        #-- create heights group
        fileID[gtx].create_group('heights')
        h5[gtx] = dict(heights={})
        for att_name in ['Description','data_rate']:
            att_val = IS2_atl03_attrs[gtx]['heights'][att_name]
            fileID[gtx]['heights'].attrs[att_name] = att_val

        #-- delta_time
        v = IS2_atl03_dem[gtx]['heights']['delta_time']
        attrs = IS2_atl03_attrs[gtx]['heights']['delta_time']
        #-- Defining the HDF5 dataset variables
        val = '{0}/{1}/{2}'.format(gtx,'heights','delta_time')
        h5[gtx]['heights']['delta_time'] = fileID.create_dataset(val,
            np.shape(v), data=v, dtype=v.dtype, compression='gzip')
        #-- add HDF5 variable attributes
        for att_name,att_val in attrs.items():
            h5[gtx]['heights']['delta_time'].attrs[att_name] = att_val

        #-- geolocation and height variables
        for k in ['latitude','longitude','dem_h']:
            #-- values and attributes
            v = IS2_atl03_dem[gtx]['heights'][k]
            attrs = IS2_atl03_attrs[gtx]['heights'][k]
            fillvalue = FILL_VALUE[gtx]['heights'][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}'.format(gtx,'heights',k)
            if fillvalue:
                h5[gtx]['heights'][k] = fileID.create_dataset(val, np.shape(v),
                    data=v,dtype=v.dtype,fillvalue=fillvalue,compression='gzip')
            else:
                h5[gtx]['heights'][k] = fileID.create_dataset(val, np.shape(v),
                    data=v, dtype=v.dtype, compression='gzip')
            #-- attach dimensions
            for dim in ['delta_time']:
                h5[gtx]['heights'][k].dims.create_scale(
                    h5[gtx]['heights'][dim], dim)
                h5[gtx]['heights'][k].dims[0].attach_scale(
                    h5[gtx]['heights'][dim])
            #-- add HDF5 variable attributes
            for att_name,att_val in attrs.items():
                h5[gtx]['heights'][k].attrs[att_name] = att_val

        #-- create subsetting group
        fileID[gtx].create_group('subsetting')
        h5[gtx]['subsetting'] = {}
        for att_name in ['Description','data_rate']:
            att_val = IS2_atl03_attrs[gtx]['subsetting'][att_name]
            fileID[gtx]['subsetting'].attrs[att_name] = att_val
        #-- add to subsetting variables
        for k,v in IS2_atl03_dem[gtx]['subsetting'].items():
            #-- attributes
            attrs = IS2_atl03_attrs[gtx]['subsetting'][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}'.format(gtx,'subsetting',k)
            h5[gtx]['subsetting'][k] = fileID.create_dataset(val, np.shape(v),
                data=v, dtype=v.dtype, compression='gzip')
            #-- attach dimensions
            for dim in ['delta_time']:
                h5[gtx]['subsetting'][k].dims.create_scale(
                    h5[gtx]['heights'][dim], dim)
                h5[gtx]['subsetting'][k].dims[0].attach_scale(
                    h5[gtx]['heights'][dim])
            #-- add HDF5 variable attributes
            for att_name,att_val in attrs.items():
                h5[gtx]['subsetting'][k].attrs[att_name] = att_val

    #-- HDF5 file title
    fileID.attrs['featureType'] = 'trajectory'
    fileID.attrs['title'] = 'ATLAS/ICESat-2 L2A Global Geolocated Photon Data'
    fileID.attrs['summary'] = ("The purpose of ATL03 is to provide along-track "
        "photon data for all 6 ATLAS beams and associated statistics.")
    fileID.attrs['description'] = ("Photon heights determined by ATBD "
        "Algorithm using POD and PPD. All photon events per transmit pulse per "
        "beam. Includes POD and PPD vectors. Classification of each photon by "
        "several ATBD Algorithms.")
    date_created = datetime.datetime.today()
    fileID.attrs['date_created'] = date_created.isoformat()
    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['project'] = project
    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['platform'] = platform
    #-- add attribute for elevation instrument and designated processing level
    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
    fileID.attrs['instrument'] = instrument
    fileID.attrs['source'] = 'Spacecraft'
    fileID.attrs['references'] = 'http://nsidc.org/data/icesat2/data.html'
    fileID.attrs['processing_level'] = '4'
    #-- add attributes for input ATL03 and ATL09 files
    fileID.attrs['input_files'] = ','.join([os.path.basename(i) for i in INPUT])
    #-- find geospatial and temporal ranges
    lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf)
    for gtx in beams:
        lon = IS2_atl03_dem[gtx]['heights']['longitude']
        lat = IS2_atl03_dem[gtx]['heights']['latitude']
        delta_time = IS2_atl03_dem[gtx]['heights']['delta_time']
        #-- setting the geospatial and temporal ranges
        lnmn = lon.min() if (lon.min() < lnmn) else lnmn
        lnmx = lon.max() if (lon.max() > lnmx) else lnmx
        ltmn = lat.min() if (lat.min() < ltmn) else ltmn
        ltmx = lat.max() if (lat.max() > ltmx) else ltmx
        tmn = delta_time.min() if (delta_time.min() < tmn) else tmn
        tmx = delta_time.max() if (delta_time.max() > tmx) else tmx
    #-- add geospatial and temporal attributes
    fileID.attrs['geospatial_lat_min'] = ltmn
    fileID.attrs['geospatial_lat_max'] = ltmx
    fileID.attrs['geospatial_lon_min'] = lnmn
    fileID.attrs['geospatial_lon_max'] = lnmx
    fileID.attrs['geospatial_lat_units'] = "degrees_north"
    fileID.attrs['geospatial_lon_units'] = "degrees_east"
    fileID.attrs['geospatial_ellipsoid'] = "WGS84"
    fileID.attrs['date_type'] = 'UTC'
    fileID.attrs['time_type'] = 'CCSDS UTC-A'
    #-- convert start and end time from ATLAS SDP seconds into Julian days
    atlas_sdp_gps_epoch=IS2_atl03_dem['ancillary_data']['atlas_sdp_gps_epoch']
    gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx])
    time_leaps = count_leap_seconds(gps_seconds)
    time_julian = 2444244.5 + (gps_seconds - time_leaps)/86400.0
    #-- convert to calendar date with convert_julian.py
    YY,MM,DD,HH,MN,SS = convert_julian(time_julian,FORMAT='tuple')
    #-- add attributes with measurement date start, end and duration
    tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]),
        int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1)))
    fileID.attrs['time_coverage_start'] = tcs.isoformat()
    tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]),
        int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1)))
    fileID.attrs['time_coverage_end'] = tce.isoformat()
    fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn)
    #-- Closing the HDF5 file
    fileID.close()
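
For reference, the GPS-seconds-to-UTC conversion used for the time_coverage attributes above can be reproduced with the standard library alone; the 18 s value is the GPS-UTC leap-second offset in effect since 2017-01-01, which is what count_leap_seconds should return for ATLAS-era dates:

import datetime
atlas_sdp_gps_epoch = 1198800018.0   #-- GPS seconds at 2018-01-01T00:00:00 UTC
gps_epoch = datetime.datetime(1980, 1, 6)
delta_time = 86400.0                 #-- one day after the ATLAS SDP epoch
leap_seconds = 18.0                  #-- GPS-UTC offset since 2017-01-01
utc = gps_epoch + datetime.timedelta(
    seconds=atlas_sdp_gps_epoch + delta_time - leap_seconds)
print(utc.isoformat())               #-- 2018-01-02T00:00:00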