Code example #1
# Assumed imports; `ref`, read_lat_long, create_geoid_subset, and
# interpolate_geoid_on_swath are defined elsewhere in the project.
import os
import xarray as xr
from scipy.interpolate import interp2d


def add_geoid_layer(dataFolder,
                    year,
                    fileIndex,
                    resolution,
                    projection,
                    geoidGeometry,
                    printStatus=True):
    if printStatus:
        print("    Step 5: Adding geoid layer to file...")

    if 'EPSG' in projection:
        baseDirectory = 'Resampled_' + str(
            resolution) + 'm_' + projection.split(':')[1]
    else:
        baseDirectory = 'Resampled_' + str(resolution) + 'm'

    regridded_folder = os.path.join(
        dataFolder, baseDirectory,
        'OMG_Ice_GLISTIN-A_L3_' + '{:02d}'.format(fileIndex))
    regridded_file = ref.indexAndYearToFileID(fileIndex, year) + '.nc'
    regridded_filepath = os.path.join(regridded_folder, regridded_file)

    print('        Reading the data geometry')
    dataset = xr.open_dataset(regridded_filepath)
    lon, lat, epsg = read_lat_long(dataset)

    print('        Finding a geoid subset around the swath')
    # step 3: find geoid subset around the swath extent
    gXsubset, gYsubset, geoidSubset = create_geoid_subset(
        geoidGeometry, lon, lat)

    print('        Making an interpolation grid for the geoid')
    # step 4: make an interpolation function for the geoid subset
    set_int = interp2d(gXsubset, gYsubset, geoidSubset)

    print('        Interpolating the geoid onto the grid points')
    # step 5: for each point in the swath, find the value in the geoid grid
    geoid = interpolate_geoid_on_swath(lon, lat, set_int)

    print('        Adding the geoid to the dataset')
    dataset['geoid'] = (['y', 'x'], geoid)

    # load the lazily-read variables into memory, then remove the old file so
    # that the new one can be saved in its place
    dataset.load()
    os.remove(regridded_filepath)
    dataset.to_netcdf(regridded_filepath)
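
A minimal usage sketch for add_geoid_layer. All argument values below are placeholders, and load_geoid_geometry() is a hypothetical stand-in for however the pipeline builds the geoid geometry that create_geoid_subset() expects:

geoidGeometry = load_geoid_geometry()  # hypothetical helper, not part of the code shown above

add_geoid_layer('/data/OMG_GLISTIN',   # placeholder data folder (a plain string works; this function uses os.path.join)
                year=2017,             # placeholder campaign year
                fileIndex=52,          # placeholder swath index
                resolution=50,
                projection='EPSG:3413',
                geoidGeometry=geoidGeometry)
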
Code example #2
# Builds parallel lists of download URLs and identifiers for every requested
# swath index and year; `ref` is the project's reference/helper module.
def generateDownloadLinks(swathIndices, yearList):
    dataDownloadLinks = []
    metadataDownloadLinks = []
    years = []
    swathIDs = []
    swathIndexLong = []
    for index in swathIndices:
        for year in yearList:
            if year == 2016 and index in ref.swathIndicesMissingIn2016():
                print('Note: Swath index ' + str(index) + ' is not available in 2016')
            else:
                fileID = ref.indexAndYearToFileID(index, year)
                years.append(year)
                swathID = ref.fileNameToSwathID(fileID)
                swathIDs.append(swathID)
                dataLink = ref.swathIDtoDataLink(swathID)
                metadataLink = ref.swathIDtoMetadataLink(swathID)
                dataDownloadLinks.append(dataLink)
                metadataDownloadLinks.append(metadataLink)
                swathIndexLong.append(index)
    return (dataDownloadLinks, metadataDownloadLinks, years, swathIDs,
            swathIndexLong)
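
A minimal usage sketch for generateDownloadLinks; the swath indices and years below are example values only, and downloading the returned URLs is outside the scope of this function:

dataLinks, metaLinks, years, swathIDs, indices = generateDownloadLinks(
    swathIndices=[51, 52, 53], yearList=[2016, 2017, 2018, 2019])

# the five returned lists are parallel: entry i of each describes the same swath
for swathID, link in zip(swathIDs, dataLinks):
    print(swathID, '->', link)
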
Code example #3
# Assumed import; `ref` is the project's reference/helper module, and
# dataFolder is expected to be a pathlib.Path.
import xarray as xr


def save_resample(dataFolder, year, fileIndex, resolution, projection, x, y,
                  output_grid, stdev_grid, count_grid, lon_grid, lat_grid):

    if 'EPSG:326' in projection:
        baseDirectory = 'Resampled_' + str(resolution) + 'm'
    else:
        baseDirectory = 'Resampled_' + str(
            resolution) + 'm_' + projection.split(':')[1]

    output_folder = dataFolder.joinpath(
        baseDirectory, 'OMG_Ice_GLISTIN-A_L3_' + '{:02d}'.format(fileIndex))

    output_file = ref.indexAndYearToFileID(fileIndex, year) + '.nc'

    if output_folder.joinpath(output_file).is_file():
        print('        Removing old version of ' + output_file)
        output_folder.joinpath(output_file).unlink()

    swath = xr.Dataset(
        {'elevation': (['y', 'x'], output_grid),
         'standard_deviation': (['y', 'x'], stdev_grid),
         'count': (['y', 'x'], count_grid),
         'projection': '',
         'longitude': (['y', 'x'], lon_grid),
         'latitude': (['y', 'x'], lat_grid)},
        coords={'y': y, 'x': x})

    swath['projection'].attrs['EPSG'] = projection

    print('        Saving to NetCDF ' + str(output_file))
    swath.to_netcdf(str(output_folder.joinpath(output_file)))
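
A shape-contract sketch for save_resample, using dummy zero-filled arrays: every 2-D grid must have shape (len(y), len(x)), dataFolder must be a pathlib.Path, and the placeholder values below would write a meaningless but structurally valid NetCDF file (assuming the output folder already exists):

import numpy as np
from pathlib import Path

y = np.arange(0.0, 1000.0, 50.0)
x = np.arange(0.0, 2000.0, 50.0)
shape = (y.size, x.size)

save_resample(Path('/data/OMG_GLISTIN'),             # placeholder data folder
              year=2017, fileIndex=52,               # placeholder file identifiers
              resolution=50, projection='EPSG:3413',
              x=x, y=y,
              output_grid=np.zeros(shape),
              stdev_grid=np.zeros(shape),
              count_grid=np.zeros(shape),
              lon_grid=np.zeros(shape),
              lat_grid=np.zeros(shape))
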
Code example #4
# Assumed import; `ref` and the read_*/generate_*/write_* helpers used below
# are defined elsewhere in the project, and dataFolder is a pathlib.Path.
import sys


def add_DEM_metadata(dataFolder,
                     year,
                     fileIndex,
                     resolution,
                     projection,
                     addGeoid,
                     printStatus=True):
    if printStatus:
        print("    Step 6: Adding DEM metadata to file...")

    if 'EPSG' in projection:
        baseDirectory = 'Resampled_' + str(
            resolution) + 'm_' + projection.split(':')[1]
    else:
        baseDirectory = 'Resampled_' + str(resolution) + 'm'

    regridded_folder = dataFolder.joinpath(
        baseDirectory, 'OMG_Ice_GLISTIN-A_L3_' + '{:02d}'.format(fileIndex))

    regridded_file = ref.indexAndYearToFileID(fileIndex, year) + '.nc'
    regridded_filepath = regridded_folder.joinpath(regridded_file)

    #step 1: read in the variables from the regridded file
    variables, variableNames, coordinates, coordinateNames, projection = read_regridded_swath(
        regridded_filepath)

    #step 2: generate a new swath with the variable and coordinates
    dataset = generate_new_dataset(variables, variableNames, coordinates,
                                   coordinateNames, projection, addGeoid)

    #step 3: add in the global metadata values
    dataset = write_global_attributes(dataset, dataFolder, regridded_filepath,
                                      resolution, variables, variableNames,
                                      coordinates, coordinateNames, projection)

    #step 4: add in the variable metadata values
    dataset = write_variables_attributes(dataset, variables, variableNames,
                                         coordinates, coordinateNames,
                                         projection, addGeoid)

    #step 5: add in the projection information
    dataset = write_projection_attributes(dataset, coordinates,
                                          coordinateNames, projection,
                                          resolution)

    #step 6: add in the coordinate attributes
    dataset = write_coordinates_attributes(dataset, coordinates,
                                           coordinateNames)

    #step 7: add in the geoid attributes if geoid is added
    if addGeoid:
        dataset = write_geoid_attributes(dataset, variables, variableNames,
                                         projection)

    # remove the old file so that the new one can be saved in its place
    try:
        regridded_filepath.unlink()
    except OSError:
        print('could not unlink ' + str(regridded_filepath))
        print(' ... aborting!')
        sys.exit(1)

    print("        Saving updated NetCDF file.")
    dataset.to_netcdf(str(regridded_filepath))
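
A minimal usage sketch for add_DEM_metadata. The values are placeholders, dataFolder must be a pathlib.Path here (unlike add_geoid_layer, which joins paths with os.path), and with addGeoid=True this step would presumably run after add_geoid_layer has written the geoid variable:

from pathlib import Path

add_DEM_metadata(Path('/data/OMG_GLISTIN'),   # placeholder data folder
                 year=2017,
                 fileIndex=52,
                 resolution=50,
                 projection='EPSG:3413',
                 addGeoid=True)
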
Code example #5
# Assumed imports; `ref` and read_metadata_dictionary are defined elsewhere
# in the project, and dataFolder is a pathlib.Path.
import numpy as np
from pyresample import geometry


def read_swath_and_create_geometry(dataFolder, fileIndex, year, printStatus):
    if printStatus:
        print(
            '    Step 2: Reading in the binary grid and creating the original geometry'
        )
        print('        Reading in binary data from file')

    fileID = ref.indexAndYearToFileID(fileIndex, year)
    metadata_dictionary = read_metadata_dictionary(dataFolder, fileID)

    swathID = ref.fileNameToSwathID(fileID)
    dataPath = dataFolder.joinpath('Raw', str(year), 'Data',
                                   swathID + '.hgt.grd')

    g = np.fromfile(str(dataPath), dtype='<f4')

    # cast to 16-bit floats ('<f2') to reduce memory use
    g = g.astype(np.dtype('<f2'))

    grid = np.reshape(g, (metadata_dictionary['GRD Latitude Lines'],
                          metadata_dictionary['GRD Longitude Samples']))
    if printStatus:
        print('        Preparing original geometry of the swath from metadata')
    grid = np.where(grid > grid.min(), grid, np.nan)

    min_swath_lon = metadata_dictionary['GRD Starting Longitude']
    max_swath_lon = metadata_dictionary['GRD Starting Longitude'] + metadata_dictionary['GRD Longitude Samples'] * \
                    metadata_dictionary['GRD Longitude Spacing']
    min_swath_lat = metadata_dictionary['GRD Starting Latitude'] + metadata_dictionary['GRD Latitude Lines'] * \
                    metadata_dictionary['GRD Latitude Spacing']
    max_swath_lat = metadata_dictionary['GRD Starting Latitude']

    lats = np.linspace(min_swath_lat, max_swath_lat,
                       metadata_dictionary['GRD Latitude Lines'])
    lons = np.linspace(min_swath_lon, max_swath_lon,
                       metadata_dictionary['GRD Longitude Samples'])

    grid = np.flipud(grid)

    if printStatus:
        print("            The grid shape is (" + str(np.shape(grid)[0]) +
              "," + str(np.shape(grid)[1]) + ")")

    # Original Area definition in swath geometry:
    Lons, Lats = np.meshgrid(lons, lats)
    Lons = np.reshape(Lons, (np.size(Lons), ))
    Lats = np.reshape(Lats, (np.size(Lats), ))
    grid = np.reshape(grid, (np.size(grid), ))

    # Remove NaN points so they do not skew the averaging during resampling
    non_nans = np.invert(np.isnan(grid))
    if printStatus:
        print('            Removed ' + str(np.sum(np.isnan(grid))) +
              ' nan points out of ' + str(np.size(grid)) + ' grid points')
    Lons = Lons[non_nans]
    Lats = Lats[non_nans]
    grid = grid[non_nans]

    area_original = geometry.SwathDefinition(lons=Lons, lats=Lats)
    return (area_original, grid)
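
A sketch of one plausible next step: feed the returned SwathDefinition and flattened elevation values into a pyresample nearest-neighbour resampling onto a regular target grid. The target area below is a made-up 400 x 400 grid at 50 m spacing with polar-stereographic parameters similar to EPSG:3413, and this is not necessarily the resampling routine the project itself uses to build its output, standard-deviation, and count grids:

from pathlib import Path
from pyresample import geometry, kd_tree

area_original, grid = read_swath_and_create_geometry(
    Path('/data/OMG_GLISTIN'), fileIndex=52, year=2017, printStatus=True)

# hypothetical target area; extent values are illustrative only
area_target = geometry.AreaDefinition(
    'target', 'example target grid', 'target',
    {'proj': 'stere', 'lat_0': 90, 'lat_ts': 70, 'lon_0': -45,
     'datum': 'WGS84', 'units': 'm'},
    400, 400, (-110000.0, -2180000.0, -90000.0, -2160000.0))

elevation = kd_tree.resample_nearest(area_original, grid, area_target,
                                     radius_of_influence=100)
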
Code example #6
# Assumed import; `ref` and read_metadata_dictionary are defined elsewhere
# in the project.
import numpy as np


def find_common_index_extent(dataFolder,
                             fileIndex,
                             printStatus,
                             useMetadata=False):
    if printStatus:
        print(
            '    Step 1: Finding a common extent for all DEMs with index '
            + str(fileIndex))

    if useMetadata:
        # scan the metadata of every available year and accumulate the
        # widest longitude/latitude bounds
        min_lon = 360
        max_lon = -360
        min_lat = 90
        max_lat = -90
        for year in [2016, 2017, 2018, 2019]:
            if year == 2016 and fileIndex in ref.fileIndicesMissingIn2016():
                continue  # this swath is not available in 2016
            fileID = ref.indexAndYearToFileID(fileIndex, year)
            metadata_dictionary = read_metadata_dictionary(dataFolder, fileID)

            min_swath_lon = metadata_dictionary['GRD Starting Longitude']
            max_swath_lon = metadata_dictionary['GRD Starting Longitude'] + \
                metadata_dictionary['GRD Longitude Samples'] * \
                metadata_dictionary['GRD Longitude Spacing']
            min_swath_lat = metadata_dictionary['GRD Starting Latitude'] + \
                metadata_dictionary['GRD Latitude Lines'] * \
                metadata_dictionary['GRD Latitude Spacing']
            max_swath_lat = metadata_dictionary['GRD Starting Latitude']

            min_lon = np.min([min_lon, min_swath_lon])
            max_lon = np.max([max_lon, max_swath_lon])
            min_lat = np.min([min_lat, min_swath_lat])
            max_lat = np.max([max_lat, max_swath_lat])
    else:
        # use the pre-computed extent stored in the reference module
        saved_extent = ref.indexToCommonExtent(fileIndex)
        min_lon = saved_extent[0]
        max_lon = saved_extent[1]
        min_lat = saved_extent[2]
        max_lat = saved_extent[3]

    if printStatus:
        print('        Longitude extents -> Min: ' +
              '{:.06f}'.format(min_lon) + '  Max: ' +
              '{:.06f}'.format(max_lon))
        print('        Latitude extents  -> Min:  ' +
              '{:.06f}'.format(min_lat) + '  Max:  ' +
              '{:.06f}'.format(max_lat))

    return (min_lon, max_lon, min_lat, max_lat)
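
A minimal usage sketch; the data folder and swath index are placeholders. The returned bounds span the same swath across every campaign year, which is the "common extent" the Step 1 message refers to:

from pathlib import Path

min_lon, max_lon, min_lat, max_lat = find_common_index_extent(
    Path('/data/OMG_GLISTIN'), fileIndex=52, printStatus=True)

print('Common lon/lat bounding box:', min_lon, max_lon, min_lat, max_lat)
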