Exemplo n.º 1
0
def convert_delta_time(delta_time, gps_epoch=1198800018.0):
    """
    converts ICESat-2 delta_times into Julian and year-decimal

    Arguments
    ---------
    delta_time: seconds since gps_epoch

    Keyword arguments
    -----------------
    gps_epoch: seconds between delta_time and GPS epoch (1980-01-06T00:00:00)

    Returns
    -------
    julian: time in Julian days
    decimal: time in year-decimal
    """
    #-- promote scalar inputs to 1-dimensional arrays
    if np.ndim(delta_time) == 0:
        delta_time = np.array([delta_time])
    #-- seconds since the GPS epoch
    gps_seconds = gps_epoch + delta_time
    #-- leap seconds between GPS time and UTC
    leap_seconds = count_leap_seconds(gps_seconds)
    #-- Julian days (UTC): the GPS epoch corresponds to Julian day 2444244.5
    time_julian = 2444244.5 + (gps_seconds - leap_seconds)/86400.0
    #-- convert Julian days to calendar dates with convert_julian.py
    Y,M,D,h,m,s = convert_julian(time_julian,FORMAT='tuple')
    #-- calculate year-decimal time (UTC)
    time_decimal = convert_calendar_decimal(Y,M,DAY=D,HOUR=h,MINUTE=m,SECOND=s)
    #-- return both the Julian and year-decimal formatted dates
    return dict(julian=np.squeeze(time_julian),decimal=np.squeeze(time_decimal))
Exemplo n.º 2
0
def HDF5_ATL03_dem_write(IS2_atl03_dem, IS2_atl03_attrs, INPUT=None,
    FILENAME='', FILL_VALUE=None, CLOBBER=True):
    """
    Writes extracted DEM values for ATL03 geolocated photon height data
    to an output HDF5 file

    Arguments
    ---------
    IS2_atl03_dem: dictionary of output variables (ancillary_data and
        one group per beam gt??)
    IS2_atl03_attrs: dictionary of HDF5 attributes for each variable

    Keyword arguments
    -----------------
    INPUT: list of input files recorded in the file-level attributes
    FILENAME: full path of the output HDF5 file
    FILL_VALUE: dictionary of fill values for the heights variables
    CLOBBER: overwrite an existing output file
    """
    #-- setting HDF5 clobber attribute
    #-- 'w' truncates an existing file; 'w-' fails if the file exists
    if CLOBBER:
        clobber = 'w'
    else:
        clobber = 'w-'

    #-- open output HDF5 file
    fileID = h5py.File(os.path.expanduser(FILENAME), clobber)

    #-- create HDF5 records
    h5 = {}

    #-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC)
    #-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    h5['ancillary_data'] = {}
    for k,v in IS2_atl03_dem['ancillary_data'].items():
        #-- Defining the HDF5 dataset variables
        val = 'ancillary_data/{0}'.format(k)
        h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v,
            dtype=v.dtype, compression='gzip')
        #-- add HDF5 variable attributes
        for att_name,att_val in IS2_atl03_attrs['ancillary_data'][k].items():
            h5['ancillary_data'][k].attrs[att_name] = att_val

    #-- write each output beam (group names matching gt1l..gt3r)
    beams = [k for k in IS2_atl03_dem.keys() if bool(re.match(r'gt\d[lr]',k))]
    for gtx in beams:
        fileID.create_group(gtx)
        #-- add HDF5 group attributes for beam
        for att_name in ['Description','atlas_pce','atlas_beam_type',
            'groundtrack_id','atmosphere_profile','atlas_spot_number',
            'sc_orientation']:
            fileID[gtx].attrs[att_name] = IS2_atl03_attrs[gtx][att_name]
        #-- create heights group
        fileID[gtx].create_group('heights')
        h5[gtx] = dict(heights={})
        for att_name in ['Description','data_rate']:
            att_val = IS2_atl03_attrs[gtx]['heights'][att_name]
            fileID[gtx]['heights'].attrs[att_name] = att_val

        #-- delta_time: created first so it can be attached as a
        #-- dimension scale to the other heights variables below
        v = IS2_atl03_dem[gtx]['heights']['delta_time']
        attrs = IS2_atl03_attrs[gtx]['heights']['delta_time']
        #-- Defining the HDF5 dataset variables
        val = '{0}/{1}/{2}'.format(gtx,'heights','delta_time')
        h5[gtx]['heights']['delta_time'] = fileID.create_dataset(val,
            np.shape(v), data=v, dtype=v.dtype, compression='gzip')
        #-- add HDF5 variable attributes
        for att_name,att_val in attrs.items():
            h5[gtx]['heights']['delta_time'].attrs[att_name] = att_val

        #-- geolocation and height variables
        for k in ['latitude','longitude','dem_h']:
            #-- values and attributes
            v = IS2_atl03_dem[gtx]['heights'][k]
            attrs = IS2_atl03_attrs[gtx]['heights'][k]
            fillvalue = FILL_VALUE[gtx]['heights'][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}'.format(gtx,'heights',k)
            if fillvalue:
                h5[gtx]['heights'][k] = fileID.create_dataset(val, np.shape(v),
                    data=v,dtype=v.dtype,fillvalue=fillvalue,compression='gzip')
            else:
                h5[gtx]['heights'][k] = fileID.create_dataset(val, np.shape(v),
                    data=v, dtype=v.dtype, compression='gzip')
            #-- attach dimensions
            #-- NOTE(review): dims.create_scale was removed in h5py 3.0;
            #-- with newer h5py use dataset.make_scale(dim) instead — confirm
            #-- the h5py version this project pins before upgrading
            for dim in ['delta_time']:
                h5[gtx]['heights'][k].dims.create_scale(
                    h5[gtx]['heights'][dim], dim)
                h5[gtx]['heights'][k].dims[0].attach_scale(
                    h5[gtx]['heights'][dim])
            #-- add HDF5 variable attributes
            for att_name,att_val in attrs.items():
                h5[gtx]['heights'][k].attrs[att_name] = att_val

        #-- create subsetting group
        fileID[gtx].create_group('subsetting')
        h5[gtx]['subsetting'] = {}
        for att_name in ['Description','data_rate']:
            att_val = IS2_atl03_attrs[gtx]['subsetting'][att_name]
            fileID[gtx]['subsetting'].attrs[att_name] = att_val
        #-- add to subsetting variables
        for k,v in IS2_atl03_dem[gtx]['subsetting'].items():
            #-- attributes
            attrs = IS2_atl03_attrs[gtx]['subsetting'][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}'.format(gtx,'subsetting',k)
            h5[gtx]['subsetting'][k] = fileID.create_dataset(val, np.shape(v),
                data=v, dtype=v.dtype, compression='gzip')
            #-- attach dimensions (scale from the heights group)
            for dim in ['delta_time']:
                h5[gtx]['subsetting'][k].dims.create_scale(
                    h5[gtx]['heights'][dim], dim)
                h5[gtx]['subsetting'][k].dims[0].attach_scale(
                    h5[gtx]['heights'][dim])
            #-- add HDF5 variable attributes
            for att_name,att_val in attrs.items():
                h5[gtx]['subsetting'][k].attrs[att_name] = att_val

    #-- HDF5 file attributes
    fileID.attrs['featureType'] = 'trajectory'
    fileID.attrs['title'] = 'ATLAS/ICESat-2 L2A Global Geolocated Photon Data'
    fileID.attrs['summary'] = ("The purpose of ATL03 is to provide along-track "
        "photon data for all 6 ATLAS beams and associated statistics.")
    fileID.attrs['description'] = ("Photon heights determined by ATBD "
        "Algorithm using POD and PPD. All photon events per transmit pulse per "
        "beam. Includes POD and PPD vectors. Classification of each photon by "
        "several ATBD Algorithms.")
    date_created = datetime.datetime.today()
    fileID.attrs['date_created'] = date_created.isoformat()
    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['project'] = project
    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    #-- bug fix: was writing the platform string to the 'project' attribute,
    #-- overwriting it and never setting 'platform'
    fileID.attrs['platform'] = platform
    #-- add attribute for elevation instrument and designated processing level
    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
    fileID.attrs['instrument'] = instrument
    fileID.attrs['source'] = 'Spacecraft'
    fileID.attrs['references'] = 'http://nsidc.org/data/icesat2/data.html'
    fileID.attrs['processing_level'] = '4'
    #-- add attributes for input ATL03 and ATL09 files
    fileID.attrs['input_files'] = ','.join([os.path.basename(i) for i in INPUT])
    #-- find geospatial and temporal ranges
    lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf)
    for gtx in beams:
        lon = IS2_atl03_dem[gtx]['heights']['longitude']
        lat = IS2_atl03_dem[gtx]['heights']['latitude']
        delta_time = IS2_atl03_dem[gtx]['heights']['delta_time']
        #-- setting the geospatial and temporal ranges
        lnmn = lon.min() if (lon.min() < lnmn) else lnmn
        lnmx = lon.max() if (lon.max() > lnmx) else lnmx
        ltmn = lat.min() if (lat.min() < ltmn) else ltmn
        ltmx = lat.max() if (lat.max() > ltmx) else ltmx
        tmn = delta_time.min() if (delta_time.min() < tmn) else tmn
        tmx = delta_time.max() if (delta_time.max() > tmx) else tmx
    #-- add geospatial and temporal attributes
    fileID.attrs['geospatial_lat_min'] = ltmn
    fileID.attrs['geospatial_lat_max'] = ltmx
    fileID.attrs['geospatial_lon_min'] = lnmn
    fileID.attrs['geospatial_lon_max'] = lnmx
    fileID.attrs['geospatial_lat_units'] = "degrees_north"
    fileID.attrs['geospatial_lon_units'] = "degrees_east"
    fileID.attrs['geospatial_ellipsoid'] = "WGS84"
    fileID.attrs['date_type'] = 'UTC'
    fileID.attrs['time_type'] = 'CCSDS UTC-A'
    #-- convert start and end time from ATLAS SDP seconds into Julian days
    atlas_sdp_gps_epoch=IS2_atl03_dem['ancillary_data']['atlas_sdp_gps_epoch']
    gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx])
    time_leaps = count_leap_seconds(gps_seconds)
    time_julian = 2444244.5 + (gps_seconds - time_leaps)/86400.0
    #-- convert to calendar date with convert_julian.py
    YY,MM,DD,HH,MN,SS = convert_julian(time_julian,FORMAT='tuple')
    #-- add attributes with measurement date start, end and duration
    #-- use builtin int: np.int was deprecated in numpy 1.20, removed in 1.24
    tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]),
        int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1)))
    fileID.attrs['time_coverage_start'] = tcs.isoformat()
    tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]),
        int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1)))
    fileID.attrs['time_coverage_end'] = tce.isoformat()
    fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn)
    #-- Closing the HDF5 file
    fileID.close()