def HDF5_ATL03_mask_write(IS2_atl03_mask,
                          IS2_atl03_attrs,
                          INPUT=None,
                          FILENAME='',
                          FILL_VALUE=None,
                          DIMENSIONS=None,
                          CLOBBER=True):
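    """
    Writes the subset ATL03 photon data and masks to an output HDF5 file

    IS2_atl03_mask: dictionary of output masks and variables for each beam
    IS2_atl03_attrs: dictionary of HDF5 attributes for each output variable
    INPUT: list of input granules to record in the file-level attributes
    FILENAME: full path of the output HDF5 file
    FILL_VALUE: dictionary of fill values for each output variable
    DIMENSIONS: dictionary of dimension scales for each output variable
    CLOBBER: overwrite an existing output file
    """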
    #-- setting HDF5 clobber attribute
    if CLOBBER:
        clobber = 'w'
    else:
        clobber = 'w-'

    #-- open output HDF5 file
    fileID = h5py.File(os.path.expanduser(FILENAME), clobber)

    #-- create HDF5 records
    h5 = {}

    #-- write the ancillary_data variables, including the number of GPS seconds
    #-- between the GPS epoch (1980-01-06T00:00:00Z UTC) and the ATLAS Standard
    #-- Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    h5['ancillary_data'] = {}
    for k, v in IS2_atl03_mask['ancillary_data'].items():
        #-- Defining the HDF5 dataset variables
        val = 'ancillary_data/{0}'.format(k)
        h5['ancillary_data'][k] = fileID.create_dataset(val,
                                                        np.shape(v),
                                                        data=v,
                                                        dtype=v.dtype,
                                                        compression='gzip')
        #-- add HDF5 variable attributes
        for att_name, att_val in IS2_atl03_attrs['ancillary_data'][k].items():
            h5['ancillary_data'][k].attrs[att_name] = att_val

    #-- write each output beam
    beams = [
        k for k in IS2_atl03_mask.keys() if bool(re.match(r'gt\d[lr]', k))
    ]
    for gtx in beams:
        fileID.create_group(gtx)
        h5[gtx] = {}
        #-- add HDF5 group attributes for beam
        for att_name in [
                'Description', 'atlas_pce', 'atlas_beam_type',
                'groundtrack_id', 'atmosphere_profile', 'atlas_spot_number',
                'sc_orientation'
        ]:
            fileID[gtx].attrs[att_name] = IS2_atl03_attrs[gtx][att_name]

        #-- for each output data group
        for key in ['heights', 'subsetting']:
            #-- create group
            fileID[gtx].create_group(key)
            h5[gtx][key] = {}
            for att_name in ['Description', 'data_rate']:
                att_val = IS2_atl03_attrs[gtx][key][att_name]
                fileID[gtx][key].attrs[att_name] = att_val

            #-- all variables for group
            groupkeys = set(IS2_atl03_mask[gtx][key].keys()) - set(
                ['delta_time'])
            for k in ['delta_time', *sorted(groupkeys)]:
                #-- values and attributes
                v = IS2_atl03_mask[gtx][key][k]
                attrs = IS2_atl03_attrs[gtx][key][k]
                fillvalue = FILL_VALUE[gtx][key][k]
                #-- Defining the HDF5 dataset variables
                val = '{0}/{1}/{2}'.format(gtx, key, k)
                if fillvalue:
                    h5[gtx][key][k] = fileID.create_dataset(
                        val,
                        np.shape(v),
                        data=v,
                        dtype=v.dtype,
                        fillvalue=fillvalue,
                        compression='gzip')
                else:
                    h5[gtx][key][k] = fileID.create_dataset(val,
                                                            np.shape(v),
                                                            data=v,
                                                            dtype=v.dtype,
                                                            compression='gzip')
                #-- create or attach dimensions for HDF5 variable
                if DIMENSIONS[gtx][key][k]:
                    #-- attach dimensions
                    for i, dim in enumerate(DIMENSIONS[gtx][key][k]):
                        h5[gtx][key][k].dims[i].attach_scale(h5[gtx][key][dim])
                else:
                    #-- make dimension
                    h5[gtx][key][k].make_scale(k)
                #-- add HDF5 variable attributes
                for att_name, att_val in attrs.items():
                    h5[gtx][key][k].attrs[att_name] = att_val

    #-- HDF5 file attributes
    fileID.attrs['featureType'] = 'trajectory'
    fileID.attrs['title'] = 'ATLAS/ICESat-2 L2A Global Geolocated Photon Data'
    fileID.attrs['summary'] = (
        "The purpose of ATL03 is to provide along-track "
        "photon data for all 6 ATLAS beams and associated statistics.")
    fileID.attrs['description'] = (
        "Photon heights determined by ATBD "
        "Algorithm using POD and PPD. All photon events per transmit pulse per "
        "beam. Includes POD and PPD vectors. Classification of each photon by "
        "several ATBD Algorithms.")
    date_created = datetime.datetime.today()
    fileID.attrs['date_created'] = date_created.isoformat()
    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['project'] = project
    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['platform'] = platform
    #-- add attribute for elevation instrument and designated processing level
    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
    fileID.attrs['instrument'] = instrument
    fileID.attrs['source'] = 'Spacecraft'
    fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2'
    fileID.attrs['processing_level'] = '4'
    #-- add attributes for input ATL03 and ATL09 files
    fileID.attrs['input_files'] = ','.join(
        [os.path.basename(i) for i in INPUT])
    #-- find geospatial and temporal ranges
    lnmn, lnmx, ltmn, ltmx, tmn, tmx = (np.inf, -np.inf, np.inf, -np.inf,
                                        np.inf, -np.inf)
    for gtx in beams:
        lon = IS2_atl03_mask[gtx]['heights']['longitude']
        lat = IS2_atl03_mask[gtx]['heights']['latitude']
        delta_time = IS2_atl03_mask[gtx]['heights']['delta_time']
        #-- setting the geospatial and temporal ranges
        lnmn = lon.min() if (lon.min() < lnmn) else lnmn
        lnmx = lon.max() if (lon.max() > lnmx) else lnmx
        ltmn = lat.min() if (lat.min() < ltmn) else ltmn
        ltmx = lat.max() if (lat.max() > ltmx) else ltmx
        tmn = delta_time.min() if (delta_time.min() < tmn) else tmn
        tmx = delta_time.max() if (delta_time.max() > tmx) else tmx
    #-- add geospatial and temporal attributes
    fileID.attrs['geospatial_lat_min'] = ltmn
    fileID.attrs['geospatial_lat_max'] = ltmx
    fileID.attrs['geospatial_lon_min'] = lnmn
    fileID.attrs['geospatial_lon_max'] = lnmx
    fileID.attrs['geospatial_lat_units'] = "degrees_north"
    fileID.attrs['geospatial_lon_units'] = "degrees_east"
    fileID.attrs['geospatial_ellipsoid'] = "WGS84"
    fileID.attrs['date_type'] = 'UTC'
    fileID.attrs['time_type'] = 'CCSDS UTC-A'
    #-- convert start and end time from ATLAS SDP seconds into UTC time
    time_utc = convert_delta_time(np.array([tmn, tmx]))
    #-- convert to calendar date
    YY, MM, DD, HH, MN, SS = icesat2_toolkit.time.convert_julian(
        time_utc['julian'], FORMAT='tuple')
    #-- add attributes with measurement date start, end and duration
    tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]),
                            int(MN[0]), int(SS[0]), int(1e6 * (SS[0] % 1)))
    fileID.attrs['time_coverage_start'] = tcs.isoformat()
    tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), int(HH[1]),
                            int(MN[1]), int(SS[1]), int(1e6 * (SS[1] % 1)))
    fileID.attrs['time_coverage_end'] = tce.isoformat()
    fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx - tmn)
    #-- Closing the HDF5 file
    fileID.close()
def HDF5_ATL06_mask_write(IS2_atl06_mask,
                          IS2_atl06_attrs,
                          INPUT=None,
                          FILENAME='',
                          FILL_VALUE=None,
                          CLOBBER=True):
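    """
    Writes the subset ATL06 land ice segment masks to an output HDF5 file

    IS2_atl06_mask: dictionary of output masks and variables for each beam
    IS2_atl06_attrs: dictionary of HDF5 attributes for each output variable
    INPUT: list of input granules to record in the file-level attributes
    FILENAME: full path of the output HDF5 file
    FILL_VALUE: dictionary of fill values for each output variable
    CLOBBER: overwrite an existing output file
    """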
    #-- setting HDF5 clobber attribute
    if CLOBBER:
        clobber = 'w'
    else:
        clobber = 'w-'

    #-- open output HDF5 file
    fileID = h5py.File(os.path.expanduser(FILENAME), clobber)

    #-- create HDF5 records
    h5 = {}

    #-- write the ancillary_data variables, including the number of GPS seconds
    #-- between the GPS epoch (1980-01-06T00:00:00Z UTC) and the ATLAS Standard
    #-- Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    h5['ancillary_data'] = {}
    for k, v in IS2_atl06_mask['ancillary_data'].items():
        #-- Defining the HDF5 dataset variables
        val = 'ancillary_data/{0}'.format(k)
        h5['ancillary_data'][k] = fileID.create_dataset(val,
                                                        np.shape(v),
                                                        data=v,
                                                        dtype=v.dtype,
                                                        compression='gzip')
        #-- add HDF5 variable attributes
        for att_name, att_val in IS2_atl06_attrs['ancillary_data'][k].items():
            h5['ancillary_data'][k].attrs[att_name] = att_val

    #-- write each output beam
    beams = [
        k for k in IS2_atl06_mask.keys() if bool(re.match(r'gt\d[lr]', k))
    ]
    for gtx in beams:
        fileID.create_group(gtx)
        #-- add HDF5 group attributes for beam
        for att_name in [
                'Description', 'atlas_pce', 'atlas_beam_type',
                'groundtrack_id', 'atmosphere_profile', 'atlas_spot_number',
                'sc_orientation'
        ]:
            fileID[gtx].attrs[att_name] = IS2_atl06_attrs[gtx][att_name]
        #-- create land_ice_segments group
        fileID[gtx].create_group('land_ice_segments')
        h5[gtx] = dict(land_ice_segments={})
        for att_name in ['Description', 'data_rate']:
            att_val = IS2_atl06_attrs[gtx]['land_ice_segments'][att_name]
            fileID[gtx]['land_ice_segments'].attrs[att_name] = att_val

        #-- segment_id
        v = IS2_atl06_mask[gtx]['land_ice_segments']['segment_id']
        attrs = IS2_atl06_attrs[gtx]['land_ice_segments']['segment_id']
        #-- Defining the HDF5 dataset variables
        val = '{0}/{1}/{2}'.format(gtx, 'land_ice_segments', 'segment_id')
        h5[gtx]['land_ice_segments']['segment_id'] = fileID.create_dataset(
            val, np.shape(v), data=v, dtype=v.dtype, compression='gzip')
        #-- make segment_id the dimension scale for the other group variables
        h5[gtx]['land_ice_segments']['segment_id'].make_scale('segment_id')
        #-- add HDF5 variable attributes
        for att_name, att_val in attrs.items():
            h5[gtx]['land_ice_segments']['segment_id'].attrs[
                att_name] = att_val

        #-- geolocation, time and height variables
        for k in ['latitude', 'longitude', 'delta_time']:
            #-- values and attributes
            v = IS2_atl06_mask[gtx]['land_ice_segments'][k]
            attrs = IS2_atl06_attrs[gtx]['land_ice_segments'][k]
            fillvalue = FILL_VALUE[gtx]['land_ice_segments'][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}'.format(gtx, 'land_ice_segments', k)
            h5[gtx]['land_ice_segments'][k] = fileID.create_dataset(
                val,
                np.shape(v),
                data=v,
                dtype=v.dtype,
                fillvalue=fillvalue,
                compression='gzip')
            #-- attach dimensions
            for dim in ['segment_id']:
                h5[gtx]['land_ice_segments'][k].dims[0].attach_scale(
                    h5[gtx]['land_ice_segments'][dim])
            #-- add HDF5 variable attributes
            for att_name, att_val in attrs.items():
                h5[gtx]['land_ice_segments'][k].attrs[att_name] = att_val

        #-- add to subsetting variables
        key = 'subsetting'
        fileID[gtx]['land_ice_segments'].create_group(key)
        h5[gtx]['land_ice_segments'][key] = {}
        for att_name in ['Description', 'data_rate']:
            att_val = IS2_atl06_attrs[gtx]['land_ice_segments'][key][att_name]
            fileID[gtx]['land_ice_segments'][key].attrs[att_name] = att_val
        for k, v in IS2_atl06_mask[gtx]['land_ice_segments'][key].items():
            #-- attributes
            attrs = IS2_atl06_attrs[gtx]['land_ice_segments'][key][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}/{3}'.format(gtx, 'land_ice_segments', key, k)
            h5[gtx]['land_ice_segments'][key][k] = \
                fileID.create_dataset(val, np.shape(v), data=v,
                dtype=v.dtype, compression='gzip')
            #-- attach dimensions
            for dim in ['segment_id']:
                h5[gtx]['land_ice_segments'][key][k].dims[0].attach_scale(
                    h5[gtx]['land_ice_segments'][dim])
            #-- add HDF5 variable attributes
            for att_name, att_val in attrs.items():
                h5[gtx]['land_ice_segments'][key][k].attrs[att_name] = att_val

    #-- HDF5 file attributes
    fileID.attrs['featureType'] = 'trajectory'
    fileID.attrs['title'] = 'ATLAS/ICESat-2 Land Ice Height'
    fileID.attrs['summary'] = (
        'Subsetting masks for ice-sheets segments '
        'needed to interpret and assess the quality of land height estimates.')
    fileID.attrs['description'] = (
        'Land ice parameters for each beam.  All '
        'parameters are calculated for the same along-track increments for '
        'each beam and repeat.')
    date_created = datetime.datetime.today()
    fileID.attrs['date_created'] = date_created.isoformat()
    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['project'] = project
    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['platform'] = platform
    #-- add attribute for elevation instrument and designated processing level
    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
    fileID.attrs['instrument'] = instrument
    fileID.attrs['source'] = 'Spacecraft'
    fileID.attrs['references'] = 'http://nsidc.org/data/icesat2/data.html'
    fileID.attrs['processing_level'] = '4'
    #-- add attributes for input ATL06 files
    fileID.attrs['input_files'] = ','.join(
        [os.path.basename(i) for i in INPUT])
    #-- find geospatial and temporal ranges
    lnmn, lnmx, ltmn, ltmx, tmn, tmx = (np.inf, -np.inf, np.inf, -np.inf,
                                        np.inf, -np.inf)
    for gtx in beams:
        lon = IS2_atl06_mask[gtx]['land_ice_segments']['longitude']
        lat = IS2_atl06_mask[gtx]['land_ice_segments']['latitude']
        delta_time = IS2_atl06_mask[gtx]['land_ice_segments']['delta_time']
        #-- setting the geospatial and temporal ranges
        lnmn = lon.min() if (lon.min() < lnmn) else lnmn
        lnmx = lon.max() if (lon.max() > lnmx) else lnmx
        ltmn = lat.min() if (lat.min() < ltmn) else ltmn
        ltmx = lat.max() if (lat.max() > ltmx) else ltmx
        tmn = delta_time.min() if (delta_time.min() < tmn) else tmn
        tmx = delta_time.max() if (delta_time.max() > tmx) else tmx
    #-- add geospatial and temporal attributes
    fileID.attrs['geospatial_lat_min'] = ltmn
    fileID.attrs['geospatial_lat_max'] = ltmx
    fileID.attrs['geospatial_lon_min'] = lnmn
    fileID.attrs['geospatial_lon_max'] = lnmx
    fileID.attrs['geospatial_lat_units'] = "degrees_north"
    fileID.attrs['geospatial_lon_units'] = "degrees_east"
    fileID.attrs['geospatial_ellipsoid'] = "WGS84"
    fileID.attrs['date_type'] = 'UTC'
    fileID.attrs['time_type'] = 'CCSDS UTC-A'
    #-- convert start and end time from ATLAS SDP seconds into UTC time
    time_utc = convert_delta_time(np.array([tmn, tmx]))
    #-- convert to calendar date
    YY, MM, DD, HH, MN, SS = icesat2_toolkit.time.convert_julian(
        time_utc['julian'], FORMAT='tuple')
    #-- add attributes with measurement date start, end and duration
    tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]),
                            int(MN[0]), int(SS[0]), int(1e6 * (SS[0] % 1)))
    fileID.attrs['time_coverage_start'] = tcs.isoformat()
    tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), int(HH[1]),
                            int(MN[1]), int(SS[1]), int(1e6 * (SS[1] % 1)))
    fileID.attrs['time_coverage_end'] = tce.isoformat()
    fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx - tmn)
    #-- Closing the HDF5 file
    fileID.close()
def HDF5_ATL11_mask_write(IS2_atl11_mask,
                          IS2_atl11_attrs,
                          INPUT=None,
                          FILENAME='',
                          FILL_VALUE=None,
                          DIMENSIONS=None,
                          CLOBBER=True):
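    """
    Writes the subset ATL11 annual land ice height masks to an output HDF5 file

    IS2_atl11_mask: dictionary of output masks and variables for each beam pair
    IS2_atl11_attrs: dictionary of HDF5 attributes for each output variable
    INPUT: list of input granules to record in the file-level attributes
    FILENAME: full path of the output HDF5 file
    FILL_VALUE: dictionary of fill values for each output variable
    DIMENSIONS: dictionary of dimension scales for each output variable
    CLOBBER: overwrite an existing output file
    """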
    #-- setting HDF5 clobber attribute
    if CLOBBER:
        clobber = 'w'
    else:
        clobber = 'w-'

    #-- open output HDF5 file
    fileID = h5py.File(os.path.expanduser(FILENAME), clobber)

    #-- create HDF5 records
    h5 = {}

    #-- write the ancillary_data variables, including the number of GPS seconds
    #-- between the GPS epoch (1980-01-06T00:00:00Z UTC) and the ATLAS Standard
    #-- Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    h5['ancillary_data'] = {}
    for k, v in IS2_atl11_mask['ancillary_data'].items():
        #-- Defining the HDF5 dataset variables
        val = 'ancillary_data/{0}'.format(k)
        h5['ancillary_data'][k] = fileID.create_dataset(val,
                                                        np.shape(v),
                                                        data=v,
                                                        dtype=v.dtype,
                                                        compression='gzip')
        #-- add HDF5 variable attributes
        for att_name, att_val in IS2_atl11_attrs['ancillary_data'][k].items():
            h5['ancillary_data'][k].attrs[att_name] = att_val

    #-- write each output beam pair
    pairs = [k for k in IS2_atl11_mask.keys() if bool(re.match(r'pt\d', k))]
    for ptx in pairs:
        fileID.create_group(ptx)
        h5[ptx] = {}
        #-- add HDF5 group attributes for beam pair
        for att_name in [
                'description', 'beam_pair', 'ReferenceGroundTrack',
                'first_cycle', 'last_cycle', 'equatorial_radius',
                'polar_radius'
        ]:
            fileID[ptx].attrs[att_name] = IS2_atl11_attrs[ptx][att_name]

        #-- ref_pt, cycle number, geolocation and delta_time variables
        for k in [
                'ref_pt', 'cycle_number', 'delta_time', 'latitude', 'longitude'
        ]:
            #-- values and attributes
            v = IS2_atl11_mask[ptx][k]
            attrs = IS2_atl11_attrs[ptx][k]
            fillvalue = FILL_VALUE[ptx][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}'.format(ptx, k)
            if fillvalue:
                h5[ptx][k] = fileID.create_dataset(val,
                                                   np.shape(v),
                                                   data=v,
                                                   dtype=v.dtype,
                                                   fillvalue=fillvalue,
                                                   compression='gzip')
            else:
                h5[ptx][k] = fileID.create_dataset(val,
                                                   np.shape(v),
                                                   data=v,
                                                   dtype=v.dtype,
                                                   compression='gzip')
            #-- create or attach dimensions for HDF5 variable
            if DIMENSIONS[ptx][k]:
                #-- attach dimensions
                for i, dim in enumerate(DIMENSIONS[ptx][k]):
                    h5[ptx][k].dims[i].attach_scale(h5[ptx][dim])
            else:
                #-- make dimension
                h5[ptx][k].make_scale(k)
            #-- add HDF5 variable attributes
            for att_name, att_val in attrs.items():
                h5[ptx][k].attrs[att_name] = att_val

        #-- add to subsetting variables
        fileID[ptx].create_group('subsetting')
        h5[ptx]['subsetting'] = {}
        for att_name in ['Description', 'data_rate']:
            att_val = IS2_atl11_attrs[ptx]['subsetting'][att_name]
            fileID[ptx]['subsetting'].attrs[att_name] = att_val
        for k, v in IS2_atl11_mask[ptx]['subsetting'].items():
            #-- attributes
            attrs = IS2_atl11_attrs[ptx]['subsetting'][k]
            fillvalue = FILL_VALUE[ptx]['subsetting'][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}'.format(ptx, 'subsetting', k)
            if fillvalue:
                h5[ptx]['subsetting'][k] = fileID.create_dataset(
                    val,
                    np.shape(v),
                    data=v,
                    dtype=v.dtype,
                    fillvalue=fillvalue,
                    compression='gzip')
            else:
                h5[ptx]['subsetting'][k] = fileID.create_dataset(
                    val,
                    np.shape(v),
                    data=v,
                    dtype=v.dtype,
                    compression='gzip')
            #-- attach dimensions
            for i, dim in enumerate(DIMENSIONS[ptx]['subsetting'][k]):
                h5[ptx]['subsetting'][k].dims[i].attach_scale(h5[ptx][dim])
            #-- add HDF5 variable attributes
            for att_name, att_val in attrs.items():
                h5[ptx]['subsetting'][k].attrs[att_name] = att_val

    #-- HDF5 file attributes
    fileID.attrs['featureType'] = 'trajectory'
    fileID.attrs['title'] = 'ATLAS/ICESat-2 Land Ice Height'
    fileID.attrs['summary'] = (
        'Subsetting masks and geophysical parameters '
        'for land ice segments needed to interpret and assess the quality '
        'of annual land height estimates.')
    fileID.attrs['description'] = (
        'Land ice parameters for each beam pair. '
        'All parameters are calculated for the same along-track increments '
        'for each beam pair and repeat.')
    date_created = datetime.datetime.today()
    fileID.attrs['date_created'] = date_created.isoformat()
    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['project'] = project
    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['platform'] = platform
    #-- add attribute for elevation instrument and designated processing level
    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
    fileID.attrs['instrument'] = instrument
    fileID.attrs['source'] = 'Spacecraft'
    fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2'
    fileID.attrs['processing_level'] = '4'
    #-- add attributes for input ATL11 files
    fileID.attrs['input_files'] = ','.join(
        [os.path.basename(i) for i in INPUT])
    #-- find geospatial and temporal ranges
    lnmn, lnmx, ltmn, ltmx, tmn, tmx = (np.inf, -np.inf, np.inf, -np.inf,
                                        np.inf, -np.inf)
    for ptx in pairs:
        lon = IS2_atl11_mask[ptx]['longitude']
        lat = IS2_atl11_mask[ptx]['latitude']
        delta_time = IS2_atl11_mask[ptx]['delta_time']
        valid = np.nonzero(delta_time != FILL_VALUE[ptx]['delta_time'])
        #-- setting the geospatial and temporal ranges
        lnmn = lon.min() if (lon.min() < lnmn) else lnmn
        lnmx = lon.max() if (lon.max() > lnmx) else lnmx
        ltmn = lat.min() if (lat.min() < ltmn) else ltmn
        ltmx = lat.max() if (lat.max() > ltmx) else ltmx
        tmn = delta_time[valid].min() if (
            delta_time[valid].min() < tmn) else tmn
        tmx = delta_time[valid].max() if (
            delta_time[valid].max() > tmx) else tmx
    #-- add geospatial and temporal attributes
    fileID.attrs['geospatial_lat_min'] = ltmn
    fileID.attrs['geospatial_lat_max'] = ltmx
    fileID.attrs['geospatial_lon_min'] = lnmn
    fileID.attrs['geospatial_lon_max'] = lnmx
    fileID.attrs['geospatial_lat_units'] = "degrees_north"
    fileID.attrs['geospatial_lon_units'] = "degrees_east"
    fileID.attrs['geospatial_ellipsoid'] = "WGS84"
    fileID.attrs['date_type'] = 'UTC'
    fileID.attrs['time_type'] = 'CCSDS UTC-A'
    #-- convert start and end time from ATLAS SDP seconds into UTC time
    time_utc = convert_delta_time(np.array([tmn, tmx]))
    #-- convert to calendar date
    YY, MM, DD, HH, MN, SS = icesat2_toolkit.time.convert_julian(
        time_utc['julian'], FORMAT='tuple')
    #-- add attributes with measurement date start, end and duration
    tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]),
                            int(MN[0]), int(SS[0]), int(1e6 * (SS[0] % 1)))
    fileID.attrs['time_coverage_start'] = tcs.isoformat()
    tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), int(HH[1]),
                            int(MN[1]), int(SS[1]), int(1e6 * (SS[1] % 1)))
    fileID.attrs['time_coverage_end'] = tce.isoformat()
    fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx - tmn)
    #-- Closing the HDF5 file
    fileID.close()
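
#-- A minimal usage sketch (assumed names): the mask, attribute, fill value and
#-- dimension dictionaries are expected to be built by the calling subsetting
#-- script before writing, e.g.
#--     HDF5_ATL03_mask_write(IS2_atl03_mask, IS2_atl03_attrs,
#--         INPUT=[ATL03_file], FILENAME=output_file,
#--         FILL_VALUE=fill_values, DIMENSIONS=dimensions, CLOBBER=True)
#-- where ATL03_file, output_file, fill_values and dimensions are placeholders
#-- defined by the caller.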