Example #1
def obs_export(obs_file,pathlength,sensor_az,sensor_zn,solar_az,solar_zn,phase,slope,aspect,cosine_i,utc_time):
    '''Export observables datasets to disk
    '''
    obs_header = envi_header_dict()
    obs_header['lines']= pathlength.shape[0]
    obs_header['samples']= pathlength.shape[1]
    obs_header['bands']= 10
    obs_header['interleave']= 'bil'
    obs_header['data type'] = 4
    obs_header['byte order'] = 0
    obs_header['band names'] = ['path length', 'to-sensor azimuth',
                                'to-sensor zenith','to-sun azimuth',
                                  'to-sun zenith','phase', 'slope',
                                  'aspect', 'cosine i','UTC time']

    writer = WriteENVI(obs_file,obs_header)
    writer.write_band(pathlength,0)
    writer.write_band(sensor_az,1)
    writer.write_band(sensor_zn,2)
    writer.write_band(solar_az,3)
    writer.write_band(solar_zn,4)
    writer.write_band(phase,5)
    writer.write_band(slope,6)
    writer.write_band(aspect,7)
    writer.write_band(cosine_i,8)
    writer.write_band(utc_time,9)
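
A minimal usage sketch (hypothetical file name and arrays; all ten observable layers must share the same lines x samples shape):

import numpy as np

lines, samples = 500, 1000
layers = [np.zeros((lines, samples), dtype=np.float32) for _ in range(10)]  # placeholder observables
obs_export('scene_obs', *layers)  # writes a 10-band, BIL-interleaved, float32 ENVI cube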
Example #2
def slope_aspect(elevation, temp_dir):
    ''' Use GDAL to calculate slope and aspect
    '''

    dem_dict = envi_header_dict()
    dem_dict['lines'] = elevation.shape[0]
    dem_dict['samples'] = elevation.shape[1]
    dem_dict['bands'] = 1
    dem_dict['interleave'] = 'bsq'
    dem_dict['data type'] = 4

    dem_file = '%stemp_dem_clip' % temp_dir
    writer = WriteENVI(dem_file, dem_dict)
    writer.write_band(elevation, 0)

    slope_file = '%s_slope' % temp_dir
    aspect_file = '%s_aspect' % temp_dir

    logging.info('Calculating slope')
    os.system('gdaldem slope -of ENVI %s %s' % (dem_file, slope_file))

    logging.info('Calculating aspect')
    os.system('gdaldem aspect -of ENVI %s %s' % (dem_file, aspect_file))

    asp_obj = ht.HyTools()
    asp_obj.read_file(aspect_file, 'envi')
    aspect = asp_obj.get_band(0)

    slp_obj = ht.HyTools()
    slp_obj.read_file(slope_file, 'envi')
    slope = slp_obj.get_band(0)

    return slope, aspect
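
A hedged usage sketch (placeholder DEM array and temporary prefix; gdaldem must be available on the system path, and temp_dir is used as a filename prefix for the intermediate rasters):

import numpy as np

elevation = np.zeros((500, 1000), dtype=np.float32)  # placeholder DEM in meters
slope, aspect = slope_aspect(elevation, '/tmp/scene_')  # degree arrays with the same shape as the DEM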
Example #3
def loc_export(loc_file,longitude,latitude,elevation):
    '''Export location datasets to disk
    '''
    loc_header = envi_header_dict()
    loc_header['lines']= longitude.shape[0]
    loc_header['samples']= longitude.shape[1]
    loc_header['bands']= 3
    loc_header['interleave']= 'bil'
    loc_header['data type'] = 4
    loc_header['band names'] = ['longitude', 'latitude','elevation']
    loc_header['byte order'] = 0

    writer = WriteENVI(loc_file,loc_header)
    writer.write_band(longitude,0)
    writer.write_band(latitude,1)
    writer.write_band(elevation,2)
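
A minimal sketch of calling loc_export with placeholder coordinate grids (all three arrays must share the same shape):

import numpy as np

lon = np.full((500, 1000), -118.20, dtype=np.float32)  # placeholder longitudes
lat = np.full((500, 1000), 36.50, dtype=np.float32)    # placeholder latitudes
elev = np.zeros((500, 1000), dtype=np.float32)         # placeholder elevations (m)
loc_export('scene_loc', lon, lat, elev)  # writes a 3-band, BIL-interleaved, float32 ENVI cube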
Example #4
def he5_to_envi(l1_zip,out_dir,temp_dir,elev_dir,shift = False, rad_coeff = None,
                match=False,proj = False,res = 30):
    '''
    This function exports three files:
        *_rdn* : Merged and optionally shift corrected radiance cube
        *_obs* : Observables file in the format of JPL obs files:
                1. Pathlength (m)
                2. To-sensor view azimuth angle (degrees)
                3. To-sensor view zenith angle (degrees)
                4. To-sun azimuth angle (degrees)
                5. To-sun zenith angle (degrees)
                6. Phase
                7. Slope (Degrees)
                8. Aspect (Degrees)
                9. Cosine i
                10. UTC decimal hours
        *_loc* : Location file in the following format:
                1. Longitude (decimal degrees)
                2. Latitude (decimal degrees)
                3. Elevation (m)

    l1_zip (str): L1 zipped radiance data product path
    out_dir (str): Output directory of ENVI datasets
    temp_dir (str): Temporary directory for intermediate files
    elev_dir (str): Directory of zipped Copernicus elevation tiles or URL to AWS Copernicus data
                    ex : 'https://copernicus-dem-30m.s3.amazonaws.com/'
    shift (bool): Apply the wavelength shift correction surface
    rad_coeff (str or None): Radiometric correction coefficients to apply: 'full', 'mean' or 'center'
    match (bool or str): Perform Landsat image matching; if a string, path to the reference file
    proj (bool): Project image to UTM grid
    res (int): Resolution of projected image in meters; must be a multiple of 30 (90, 120, 150, ...)
    '''

    base_name = os.path.basename(l1_zip)[16:-4]
    out_dir = "%s/PRS_%s/" % (out_dir,base_name)

    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    logging.basicConfig(filename='%s/PRS_%s.log' % (out_dir,base_name),
            format='%(asctime)s: %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S',
            level=logging.NOTSET)

    temp_dir = '%s/tmpPRS_%s/'% (temp_dir,base_name)
    if not os.path.isdir(temp_dir):
        os.mkdir(temp_dir)

    zip_base = os.path.basename(l1_zip)
    logging.info('Unzipping %s' % zip_base)
    with zipfile.ZipFile(l1_zip,'r') as zipped:
        zipped.extractall(temp_dir)

    l1_obj = h5py.File('%sPRS_L1_STD_OFFL_%s.he5' % (temp_dir,base_name),'r')

    if shift:
        shift_file = importlib.resources.open_binary(data,"PRS_20210409105743_20210409105748_0001_wavelength_shift_surface.npz")
        shift_obj = np.load(shift_file)
        shift_surface = shift_obj['shifts']
        #interp_kind = shift_obj['interp_kind']
        interp_kind='quadratic'

    coeff_arr = np.ones((996, 230))

    if rad_coeff is not None:
        coeff_file = importlib.resources.open_binary(data,"PRS_20210409105743_20210409105748_0001_radcoeff_surface.npz")
        coeff_obj = np.load(coeff_file)

        if rad_coeff == 'full':
            coeff_arr = coeff_obj['coeffs']
        elif rad_coeff == 'mean':
            coeff_arr[:] = coeff_obj['coeffs'].mean(axis=0)
        elif rad_coeff == 'center':
            coeff_arr[:] = coeff_obj['coeffs'][498-25:498+25].mean(axis=0)
        else:
            print('Unrecognized coeff type')


    #Define output paths
    if proj:
        rdn_file = '%sPRS_%s_rdn' % (temp_dir,base_name)
        loc_file = '%sPRS_%s_loc' % (temp_dir,base_name)
        obs_file = '%sPRS_%s_obs' % (temp_dir,base_name)
    else:
        rdn_file = '%sPRS_%s_rdn' % (out_dir,base_name)
        loc_file = '%sPRS_%s_loc' % (out_dir,base_name)
        obs_file = '%sPRS_%s_obs' % (out_dir,base_name)

    measurement = 'rdn'
    logging.info('Exporting radiance data')

    # Export VNIR to temporary ENVI
    vnir_data =  l1_obj['HDFEOS']["SWATHS"]['PRS_L1_HCO']['Data Fields']['VNIR_Cube']
    vnir_waves = l1_obj.attrs.get('List_Cw_Vnir')
    vnir_fwhm = l1_obj.attrs.get('List_Fwhm_Vnir')

    rdn_dict = envi_header_dict ()
    rdn_dict['lines']= vnir_data.shape[0]
    rdn_dict['samples']= vnir_data.shape[2]
    rdn_dict['bands']=  vnir_data.shape[1]
    rdn_dict['wavelength']= vnir_waves
    rdn_dict['fwhm']= vnir_fwhm
    rdn_dict['interleave']= 'bsq'
    rdn_dict['data type'] = 12
    rdn_dict['wavelength units'] = "nanometers"
    rdn_dict['byte order'] = 0
    vnir_temp = '%sPRS_%s_%s_vnir' % (temp_dir,base_name,measurement)

    writer = WriteENVI(vnir_temp,rdn_dict )
    writer.write_chunk(np.moveaxis(vnir_data[:,:,:],1,2), 0,0)

    # Export SWIR to temporary ENVI
    swir_data =  l1_obj['HDFEOS']["SWATHS"]['PRS_L1_HCO']['Data Fields']['SWIR_Cube']
    swir_waves = l1_obj.attrs.get('List_Cw_Swir')
    swir_fwhm = l1_obj.attrs.get('List_Fwhm_Swir')

    rdn_dict['lines']= swir_data.shape[0]
    rdn_dict['samples']= swir_data.shape[2]
    rdn_dict['bands']=  swir_data.shape[1]
    rdn_dict['wavelength']= swir_waves
    rdn_dict['fwhm']= swir_fwhm
    swir_temp = '%sPRS_%s_%s_swir' % (temp_dir,base_name,measurement)

    writer = WriteENVI(swir_temp,rdn_dict )
    writer.write_chunk(np.moveaxis(swir_data[:,:,:],1,2), 0,0)

    vnir_waves = np.flip(vnir_waves[3:]) #6
    swir_waves = np.flip(swir_waves[:-6]) #-3

    vnir_fwhm = np.flip(vnir_fwhm[3:])
    swir_fwhm = np.flip(swir_fwhm[:-6])

    vnir_obj = ht.HyTools()
    vnir_obj.read_file(vnir_temp, 'envi')

    swir_obj = ht.HyTools()
    swir_obj.read_file(swir_temp, 'envi')

    rdn_dict = envi_header_dict()
    rdn_dict['lines'] = vnir_obj.lines-4 # Clip edges of array
    rdn_dict['samples'] = vnir_obj.columns-4 # Clip edges of array
    rdn_dict['bands'] = len(vnir_waves.tolist() + swir_waves.tolist())
    rdn_dict['wavelength'] = vnir_waves.tolist() + swir_waves.tolist()
    rdn_dict['fwhm'] = vnir_fwhm.tolist() + swir_fwhm.tolist()
    rdn_dict['interleave'] = 'bil'
    rdn_dict['data type'] = 4
    rdn_dict['wavelength units'] = "nanometers"
    rdn_dict['byte order'] = 0
    rdn_dict['default bands'] = [int(vnir_obj.wave_to_band(660)),
                                 int(vnir_obj.wave_to_band(560)),
                                 int(vnir_obj.wave_to_band(460))]

    writer = WriteENVI(rdn_file,rdn_dict)
    iterator_v =vnir_obj.iterate(by = 'line')
    iterator_s =swir_obj.iterate(by = 'line')

    while not iterator_v.complete:
        chunk_v = iterator_v.read_next()[:,3:]
        chunk_v =np.flip(chunk_v,axis=1)
        chunk_s = iterator_s.read_next()[:,:-6]
        chunk_s =np.flip(chunk_s,axis=1)

        if (iterator_v.current_line >=2) and (iterator_v.current_line <= 997):
            if (measurement == 'rdn') & shift:
                vnir_interpolator = interp1d(vnir_waves+shift_surface[iterator_v.current_line-2,:63],
                                               chunk_v[2:-2,:],fill_value = "extrapolate",kind=interp_kind)
                chunk_v = vnir_interpolator(vnir_waves)
                swir_interpolator = interp1d(swir_waves+shift_surface[iterator_v.current_line-2,63:],
                                               chunk_s[2:-2,:],fill_value = "extrapolate",kind=interp_kind)
                chunk_s = swir_interpolator(swir_waves)

                line = np.concatenate([chunk_v,chunk_s],axis=1)/1000.

            else:
                line = np.concatenate([chunk_v,chunk_s],axis=1)[2:-2,:]/1000.

            #Apply rad coeffs
            line*=coeff_arr[iterator_v.current_line-2,:]

            writer.write_line(line, iterator_v.current_line-2)

    #Load ancillary datasets
    geo =  l1_obj['HDFEOS']["SWATHS"]['PRS_L1_HCO']['Geolocation Fields']
    pvs =  l1_obj['Info']["Ancillary"]['PVSdata']

    # Time
    '''1. Convert from MJD2000 to UTC hours
       2. Fit a line to estimate continuous time.
    '''

    def dhour(day):
        epoch = dt.datetime(2000,1, 1,)
        epoch = epoch.replace(tzinfo=dt.timezone.utc)

        hour =  (day-day//1)*24
        minute =  (hour-hour//1)*60
        second= (minute-minute//1)*60
        microsecond= (second-second//1)*1000000
        time = epoch + dt.timedelta(days=day//1,hours=hour//1,
                                    minutes=minute//1,seconds=second,
                                    microseconds =microsecond)
        return time.hour + time.minute/60. + time.second/3600.
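    # Hedged worked example: dhour(7770.4375) returns 10.5 (10:30 UTC), since the
    # fractional day 0.4375 corresponds to 10.5 hours past midnight.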

    v_dhour = np.vectorize(dhour)
    utc_time = v_dhour(np.array(geo['Time'][:]))
    utc_time = np.ones(geo['Longitude_VNIR'][:,:].shape[0]) *utc_time[:,np.newaxis]
    utc_time = utc_time[2:-2,2:-2]

    # Solar geometries
    '''Solar geometry is calculated from the mean scene acquisition time,
    which varies by less than 5 seconds from the start to the end of the
    scene; using a single time is computationally more efficient.
    '''
    mjd2000_epoch = dt.datetime(2000,1, 1,)
    mjd2000_epoch = mjd2000_epoch.replace(tzinfo=dt.timezone.utc)
    mean_time = mjd2000_epoch + dt.timedelta(days=np.array(geo['Time'][:]).mean())

    solar_az = solar.get_azimuth(geo['Latitude_VNIR'][:,:],geo['Longitude_VNIR'][:,:],mean_time)[2:-2,2:-2]
    solar_zn = 90-solar.get_altitude(geo['Latitude_VNIR'][:,:],geo['Longitude_VNIR'][:,:],mean_time)[2:-2,2:-2]

    longitude= geo['Longitude_VNIR'][2:-2,2:-2]
    latitude= geo['Latitude_VNIR'][2:-2,2:-2]

    #Create initial elevation raster
    elevation= dem_generate(longitude,latitude,elev_dir,temp_dir)
    zone,direction = utm_zone(longitude,latitude)

    # Calculate satellite X,Y,Z position for each line
    ''' GPS data are sampled at 1 Hz, resulting in steps in the
        position data; a line is fit to each dimension to estimate
        a continuous position.

        There are more GPS samples than there are lines. To align
        the GPS signal with the lines, we use the provided 'Time'
        information for each line to match it with the GPS data.

        When converting GPS time to UTC we use a 17 sec difference
        instead of 18 sec because it matches the time provided in
        the time array.
    '''

    # Convert satellite GPS position time to UTC
    sat_t = []
    for second,week in zip(pvs['GPS_Time_of_Last_Position'][:].flatten(),pvs['Week_Number'][:].flatten()):
        gps_second = week*7*24*60*60 + second
        gps_epoch = dt.datetime(1980, 1, 6)
        gps_time  = gps_epoch+ dt.timedelta(seconds=gps_second - 17)
        sat_t.append(gps_time.hour*3600 + gps_time.minute*60. + gps_time.second)
    sat_t = np.array(sat_t)[:,np.newaxis]

    # Convert line MJD2000 to UTC
    grd_t = []
    for day in geo['Time'][:].flatten():
        time = mjd2000_epoch + dt.timedelta(days=day)
        grd_t.append(time.hour*3600 + time.minute*60. + time.second)
    grd_t = np.array(grd_t)[:,np.newaxis]

    #Fit a line to ground time
    X = np.concatenate([np.arange(1000)[:,np.newaxis], np.ones(grd_t.shape)],axis=1)
    slope, intercept = np.linalg.lstsq(X,grd_t,rcond=-1)[0].flatten()
    line_t_linear = slope*np.arange(1000)+ intercept

    #Fit a line to satellite time
    measurements = np.arange(len(sat_t))
    X = np.concatenate([measurements[:,np.newaxis], np.ones(sat_t.shape)],axis=1)
    slope, intercept = np.linalg.lstsq(X,sat_t,rcond=-1)[0].flatten()
    sat_t_linear = slope*measurements+ intercept

    # Interpolate x,y,z satellite positions
    sat_xyz = []
    for sat_pos in ['x','y','z']:
        sat_p = np.array(pvs['Wgs84_pos_%s' % sat_pos][:])
        slope, intercept = np.linalg.lstsq(X,sat_p,rcond=-1)[0].flatten()
        sat_p_linear = slope*measurements+ intercept
        interpolator = interp1d(sat_t_linear,sat_p_linear,
                                fill_value="extrapolate",kind = 'linear')
        sat_interp = interpolator(line_t_linear)
        sat_xyz.append(sat_interp[2:-2])
    sat_xyz = np.array(sat_xyz)

    # Calculate sensor to ground pathlength
    grd_xyz = np.array(dda2ecef(longitude,latitude,elevation))
    path = pathlength(sat_xyz,grd_xyz)

    # Export satellite position to csv
    sat_lon,sat_lat,sat_alt = ecef2dda(sat_xyz[0],sat_xyz[1],sat_xyz[2])
    satellite_df = pd.DataFrame()
    satellite_df['lat'] = sat_lat
    satellite_df['lon'] = sat_lon
    satellite_df['alt'] = sat_alt
    satellite_df.to_csv('%sPRS_%s_satellite_loc.csv' % (out_dir,base_name))

    # Convert satellite coords to local ENU
    sat_enu  = np.array(dda2utm(sat_lon,sat_lat,sat_alt,
                       utm_zone(longitude,latitude)))
    # Convert ground coords to local ENU
    easting,northing,up  =dda2utm(longitude,latitude,
                                elevation)

    # Calculate sensor geometry
    sensor_zn,sensor_az = sensor_view_angles(sat_enu,
                                             np.array([easting,northing,up]))

    # Perform image matching
    if match:
        coords =np.concatenate([np.expand_dims(easting.flatten(),axis=1),
                                np.expand_dims(northing.flatten(),axis=1)],axis=1)
        warp_east = easting.min()-100
        warp_north =northing.max()+100
        pixel_size = 30

        project = Projector()
        project.create_tree(coords,easting.shape)
        project.query_tree(warp_east,warp_north,pixel_size)

        # Project independent variables
        sensor_az_prj = project.project_band(sensor_az,-9999,angular=True)
        sensor_zn_prj = project.project_band(sensor_zn,-9999,angular=True)
        elevation_prj = project.project_band(elevation.astype(float),-9999)

        radiance = ht.HyTools()
        radiance.read_file(rdn_file, 'envi')

        #Average over Landsat 8 Band 5 bandwidth and warp
        unwarp_band = np.zeros(longitude.shape)
        for wave in range(850,890,10):
            unwarp_band += radiance.get_wave(wave)/7.
        warp_band = project.project_band(unwarp_band,-9999)
        warp_band = 16000*(warp_band-warp_band.min())/warp_band.max()

        if isinstance(match,bool):
            landsat,land_east,land_north = get_landsat_image(longitude,latitude,
                                                             mean_time.month,
                                                             max_cloud = 5)
        else:
            lst = ht.HyTools()
            lst.read_file(match,'envi')
            landsat = lst.get_band(0)
            land_east = float(lst.map_info[3])
            land_north = float(lst.map_info[4])

        #Calculate offsets between reference and input images
        offset_x = int((warp_east-land_east)//pixel_size)
        offset_y = int((land_north-warp_north)//pixel_size)

        #Calculate optimal shift
        y_model,x_model = image_match(landsat,warp_band,
                                      offset_x,offset_y,
                                      sensor_zn_prj,sensor_az_prj,elevation_prj)

        #Apply uniform filter
        smooth_elevation = uniform_filter(elevation,25)
        smooth_az = uniform_filter(sensor_az,25)
        smooth_zn = uniform_filter(sensor_zn,25)

        # Generate y and x offset surfaces
        i,a,b,c = y_model
        y_offset = i + a*smooth_zn +b*smooth_az + c*smooth_elevation

        i,a,b,c= x_model
        x_offset = i + a*smooth_zn +b*smooth_az + c*smooth_elevation

        # Calculate updated coordinates
        easting = easting+  30*x_offset
        northing = northing- 30*y_offset

        zone,direction = utm_zone(longitude,latitude)
        longitude,latitude = utm2dd(easting,northing,zone,direction)

        #Recalculate elevation with new coordinates
        logging.info('Rebuilding DEM')
        elevation= dem_generate(longitude,latitude,elev_dir,temp_dir)


    # Export location datacube
    loc_export(loc_file,longitude,latitude,elevation)

    # Generate remaining observable layers
    slope,aspect = slope_aspect(elevation,temp_dir)
    cosine_i = calc_cosine_i(np.radians(solar_zn),
                             np.radians(solar_az),
                             np.radians(slope),
                             np.radians(aspect))
    rel_az = np.radians(solar_az-sensor_az)
    phase =  np.arccos(np.cos(np.radians(solar_zn)))*np.cos(np.radians(solar_zn))
    phase += np.sin(np.radians(solar_zn))*np.sin(np.radians(solar_zn))*np.cos(rel_az)

    # Export observables datacube
    obs_export(obs_file,path,sensor_az,sensor_zn,
               solar_az,solar_zn,phase,slope,aspect,
               cosine_i,utc_time)

    if proj:
        #Create new projector with corrected coordinates
        new_coords =np.concatenate([np.expand_dims(easting.flatten(),axis=1),
                        np.expand_dims(northing.flatten(),axis=1)],axis=1)

        project = Projector()
        project.create_tree(new_coords,easting.shape)
        project.query_tree(easting.min()-100,northing.max()+100,30)

        blocksize = int(res/30)
        map_info = ['UTM', 1, 1, easting.min()-100 - (res/2), northing.max()+100 + (res/2),res,
                           res,zone,direction, 'WGS-84' , 'units=Meters']
        out_cols = int(blocksize* (project.output_shape[1]//blocksize))
        out_lines = int(blocksize* (project.output_shape[0]//blocksize))

        logging.info('Georeferencing datasets to %sm resolution' % res)
        for file in ['rdn','loc','obs']:
            input_name = '%sPRS_%s_%s' % (temp_dir,base_name,file)
            hy_obj = ht.HyTools()
            hy_obj.read_file(input_name, 'envi')
            iterator =hy_obj.iterate(by = 'band')

            out_header = hy_obj.get_header()
            out_header['lines']= project.output_shape[0]//blocksize
            out_header['samples']=project.output_shape[1]//blocksize
            out_header['data ignore value'] = -9999
            out_header['map info'] = map_info

            output_name = '%sPRS_%s_%s_prj' % (out_dir,base_name,file)
            writer = WriteENVI(output_name,out_header)

            while not iterator.complete:
                if (file == 'obs') & (iterator.current_band in [1,2,3,4,7]):
                    angular = True
                else:
                    angular = False
                band = project.project_band(iterator.read_next(),-9999,angular=angular)
                band[band == -9999] = np.nan
                bins =view_as_blocks(band[:out_lines,:out_cols], (blocksize,blocksize))

                if angular:
                    bins = np.radians(bins)
                    band = circmean(bins,axis=2,nan_policy = 'omit')
                    band = circmean(band,axis=2,nan_policy = 'omit')
                    band = np.degrees(band)
                else:
                    band = np.nanmean(bins,axis=(2,3))

                if file == 'rdn':
                    band[band<0] = 0
                band[np.isnan(band)] = -9999
                writer.write_band(band,iterator.current_band)

    logging.info('Deleting temporary files')
    shutil.rmtree(temp_dir)
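
A hedged driver sketch for the PRISMA converter above. The paths are hypothetical; the zip name must follow the PRS_L1_STD_OFFL_* convention because the base name is sliced from it:

he5_to_envi('/data/raw/PRS_L1_STD_OFFL_20210409105743_20210409105748_0001.zip',
            out_dir='/data/output',
            temp_dir='/data/temp',
            elev_dir='https://copernicus-dem-30m.s3.amazonaws.com/',
            shift=True,          # apply the packaged wavelength shift surface
            rad_coeff='full',    # apply the full radiometric coefficient surface
            match=True,          # search for a Landsat reference image
            proj=True,
            res=30)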
Example #5
def l1b_process(l1b_zip,
                out_dir,
                temp_dir,
                elev_dir,
                match=None,
                proj=True,
                res=30):
    '''
     This function exports three files:
         *_rad* : Merged and optionally shift corrected radiance cube
         *_obs* : Observables file in the format of JPL obs files:
                 1. Pathlength (m)
                 2. To-sensor view azimuth angle (degrees)
                 3. To-sensor view zenith angle (degrees)
                 4. To-sun azimuth angle (degrees)
                 5. To-sun zenith angle (degrees)
                 6. Phase
                 7. Slope (Degrees)
                 8. Aspect (Degrees)
                 9. Cosine i
                 10. UTC decimal hours
         *_loc* : Location file in the following format:
                 1. Longitude (decimal degrees)
                 2. Latitude (decimal degrees)
                 3. Elevation (m)

     l1b_zip (str): L1B zipped radiance data product path
     out_dir (str): Output directory of ENVI datasets
     temp_dir (str): Temporary directory for intermediate files
     elev_dir (str): Directory of zipped Copernicus elevation tiles or URL to AWS Copernicus data
                    ex : 'https://copernicus-dem-30m.s3.amazonaws.com/'
     match (str or None): Pathname to Landsat image for image re-registration (recommended)
     proj (bool): Project image to UTM grid
     res (int): Resolution of projected image in meters; must be a multiple of 30 (90, 120, 150, ...)
    '''

    base_name = os.path.basename(l1b_zip)[14:-4]

    out_dir = '%s/DESIS_%s/' % (out_dir, base_name)
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    temp_dir = '%s/tmpDESIS_%s/' % (temp_dir, base_name)
    if not os.path.isdir(temp_dir):
        os.mkdir(temp_dir)

    zip_base = os.path.basename(l1b_zip)
    logging.info('Unzipping %s' % zip_base)
    with zipfile.ZipFile(l1b_zip, 'r') as zipped:
        zipped.extractall(temp_dir)

    l1b_file = gdal.Open('%s/DESIS-HSI-L1B-%s-SPECTRAL_IMAGE.tif' %
                         (temp_dir, base_name))

    # Parse relevant metadata from the XML file; assumes metadata are in the same directory as the images
    tree = ET.parse('%s/DESIS-HSI-L1B-%s-METADATA.xml' % (temp_dir, base_name))
    root = tree.getroot()
    specific = root[3]
    band_meta = {}
    for item in specific.findall('bandCharacterisation'):
        for band in item:
            for meta in band:
                if meta.tag not in band_meta.keys():
                    band_meta[meta.tag] = []
                string = str(meta.text.encode('utf8'))
                string = string.replace('\\n', ' ')
                string = string.replace('b', ' ')
                string = string.replace("'", ' ')
                values = string.split(',')
                values = [float(x) for x in values]
                if len(values) == 1:
                    values = values[0]
                band_meta[meta.tag].append(values)
    offset = np.array(band_meta['offsetOfBand'])
    gain = np.array(band_meta['gainOfBand'])
    waves = np.array(band_meta['wavelengthCenterOfBand'])
    fwhm = np.array(band_meta['wavelengthWidthOfBand'])
    response = np.array(band_meta['response'])
    response_waves = np.array(band_meta['wavelengths'])

    # Fit a Gaussian to the response to determine center wavelength and FWHM
    opt_waves = []
    opt_fwhm = []
    for i, wave in enumerate(waves):
        popt, pcov = curve_fit(gaussian, response_waves[i],
                               np.array(response[i]) / max(response[i]),
                               [waves[i], fwhm[i]])
        opt_waves.append(popt[0])
        opt_fwhm.append(popt[1])

    scene_az = float(specific.findall('sceneAzimuthAngle')[0].text)
    scene_zn = float(specific.findall('sceneIncidenceAngle')[0].text)

    # Get acquisition start and end time
    base = root[2]
    time_str = base.findall('temporalCoverage')[0].findall('startTime')[0].text
    time_str = time_str.replace('T', ' ').replace('Z', '')
    start_time = dt.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
    start_time = start_time.replace(tzinfo=dt.timezone.utc)

    time_str = base.findall('temporalCoverage')[0].findall('endTime')[0].text
    time_str = time_str.replace('T', ' ').replace('Z', '')
    end_time = dt.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
    end_time = end_time.replace(tzinfo=dt.timezone.utc)
    date = dt.datetime.strftime(start_time, "%Y%m%d")

    # Get orbital data
    orbit_data = []
    for item in specific.findall('orbit'):
        for line in item:
            for point in line.findall('point'):
                time_str = line.findall('timeUTC')[0].text
                time_str = time_str.replace('T', ' ').replace('Z', '')
                orbit_time = dt.datetime.strptime(time_str,
                                                  "%Y-%m-%d %H:%M:%S.%f")
                orbit_time = orbit_time.replace(tzinfo=dt.timezone.utc)

                #The metadata contains orbital info beyond the collection window
                # we use the acquisition start and end times to filter points
                if (orbit_time >= start_time) & (orbit_time <= end_time):
                    for location in point.findall('location'):
                        x = float(location.findall('X')[0].text)
                        y = float(location.findall('Y')[0].text)
                        z = float(location.findall('Z')[0].text)
                        orbit_data.append([x, y, z])

    orbit_data = np.array(orbit_data)

    l1b_band = l1b_file.ReadAsArray().mean(axis=0)

    # Get bounding coordinates of scene
    coord_dict = {}
    polygon = base.findall('spatialCoverage')[0].findall('boundingPolygon')[0]
    for point in polygon:
        name = point.findall('frame')[0].text
        lat = float(point.findall('latitude')[0].text)
        lon = float(point.findall('longitude')[0].text)
        coord_dict[name] = [lat, lon]

    # Get ISS altitude
    altitude_m = float(base.findall('altitudeCoverage')[0].text)

    raster = l1b_file.ReadAsArray()
    mask = raster[1].astype(float)
    mask = mask == mask[0][0]

    rad_dict = envi_header_dict()
    rad_dict['lines'] = l1b_file.RasterYSize
    rad_dict['samples'] = l1b_file.RasterXSize - 85
    rad_dict['bands'] = len(waves) - 1
    rad_dict['wavelength'] = opt_waves[1:]
    rad_dict['fwhm'] = opt_fwhm[1:]
    rad_dict['interleave'] = 'bil'
    rad_dict['data type'] = 4
    rad_dict['wavelength units'] = "nanometers"
    rad_dict['byte order'] = 0
    rad_dict['data ignore value'] = -9999
    rad_dict['default bands'] = [
        np.argmin(np.abs(waves - 660)),
        np.argmin(np.abs(waves - 560)),
        np.argmin(np.abs(waves - 460))
    ]

    #Define output paths
    if proj:
        rad_file = '%sDESIS_%s_rdn' % (temp_dir, base_name)
        loc_file = '%sDESIS_%s_loc' % (temp_dir, base_name)
        obs_file = '%sDESIS_%s_obs' % (temp_dir, base_name)
    else:
        rad_file = '%sDESIS_%s_rdn' % (out_dir, base_name)
        loc_file = '%sDESIS_%s_loc' % (out_dir, base_name)
        obs_file = '%sDESIS_%s_obs' % (out_dir, base_name)

    writer = WriteENVI(rad_file, rad_dict)

    #Write VNIR cube
    logging.info('Exporting radiance data')
    for line_num in range(l1b_file.RasterYSize):
        line = raster[:, line_num, :].astype(float)
        line = line * gain[:, np.newaxis] + offset[:, np.newaxis]
        line = line[1:, 85:].T
        writer.write_line(line, line_num)

    del raster

    # Location datacube
    ###########################################################################
    lines, columns = np.indices((l1b_file.RasterYSize, l1b_file.RasterXSize))

    lat_vals = []
    lon_vals = []
    points = [[0, 0], [l1b_file.RasterYSize, 0],
              [l1b_file.RasterYSize, l1b_file.RasterXSize],
              [0, l1b_file.RasterXSize]]

    for point in [1, 2, 3, 4]:
        lat_vals.append(coord_dict['point_%s' % point][0])
        lon_vals.append(coord_dict['point_%s' % point][1])

    longitude = griddata(points, lon_vals, (lines, columns),
                         method='linear')[:, 85:]
    latitude = griddata(points, lat_vals, (lines, columns),
                        method='linear')[:, 85:]

    #Create initial elevation raster
    elevation = dem_generate(longitude, latitude, elev_dir, temp_dir)
    zone, direction = utm_zone(longitude, latitude)

    solar_az = solar.get_azimuth(latitude, longitude, start_time)
    solar_zn = 90 - solar.get_altitude(latitude, longitude, start_time)

    ecef = pyproj.Proj(proj='geocent', ellps='WGS84', datum='WGS84')
    lla = pyproj.Proj(proj='latlong', ellps='WGS84', datum='WGS84')

    grd_xyz = np.array(
        pyproj.transform(lla,
                         ecef,
                         longitude,
                         latitude,
                         elevation,
                         radians=False))

    # Calculate satellite XYZ position
    sat_xyz = []
    line_grid = np.linspace(0, 1, orbit_data.shape[0]) * longitude.shape[0]
    for sat_coord in orbit_data.T:
        interpolator = interp1d(line_grid, sat_coord)
        sat_interp = interpolator(np.arange(longitude.shape[0]))
        sat_xyz.append(sat_interp)
    sat_xyz = np.array(sat_xyz)
    path = np.linalg.norm(sat_xyz[:, :, np.newaxis] - grd_xyz, axis=0)
    # Export satellite position to csv
    sat_lon, sat_lat, sat_alt = ecef2dda(sat_xyz[0], sat_xyz[1], sat_xyz[2])
    satellite_df = pd.DataFrame()
    satellite_df['lat'] = sat_lat
    satellite_df['lon'] = sat_lon
    satellite_df['alt'] = sat_alt
    satellite_df.to_csv('%sDESIS_%s_satellite_loc.csv' % (out_dir, base_name))

    # Convert satellite coords to local ENU
    sat_enu = np.array(
        dda2utm(sat_lon, sat_lat, sat_alt, utm_zone(longitude, latitude)))
    # Convert ground coords to local ENU
    easting, northing, up = dda2utm(longitude, latitude, elevation)

    # Calculate sensor geometry
    sensor_zn, sensor_az = sensor_view_angles(
        sat_enu, np.array([easting, northing, up]))

    if match:
        coords = np.concatenate([
            np.expand_dims(easting.flatten(), axis=1),
            np.expand_dims(northing.flatten(), axis=1)
        ],
                                axis=1)
        warp_east = easting.min() - 100
        warp_north = northing.max() + 100
        pixel_size = 30

        project = Projector()
        project.create_tree(coords, easting.shape)
        project.query_tree(warp_east, warp_north, pixel_size)

        # Project independent variables
        sensor_az_prj = project.project_band(sensor_az, -9999)
        sensor_zn_prj = project.project_band(sensor_zn, -9999)
        elevation_prj = project.project_band(elevation.astype(float), -9999)

        radiance = ht.HyTools()
        radiance.read_file(rad_file, 'envi')

        #Average over Landsat 8 Band 5 bandwidth and warp
        warp_band = np.zeros(longitude.shape)
        for wave in range(850, 890, 10):
            warp_band += radiance.get_wave(wave) / 7.
        warp_band = project.project_band(warp_band, -9999)
        warp_band = 16000 * (warp_band - warp_band.min()) / warp_band.max()

        landsat, land_east, land_north = get_landsat_image(longitude,
                                                           latitude,
                                                           end_time.month,
                                                           max_cloud=5)

        #Calculate offsets between reference and input images
        offset_x = int((warp_east - land_east) // pixel_size)
        offset_y = int((land_north - warp_north) // pixel_size)

        #Calculate optimal shift
        y_model, x_model = image_match(landsat,
                                       warp_band,
                                       offset_x,
                                       offset_y,
                                       sensor_zn_prj,
                                       sensor_az_prj,
                                       elevation_prj,
                                       shift_max=30)

        #Apply uniform filter
        smooth_elevation = uniform_filter(elevation, 25)
        smooth_az = uniform_filter(sensor_az, 25)
        smooth_zn = uniform_filter(sensor_zn, 25)

        # Generate y and x offset surfaces
        i, a, b, c = y_model
        y_offset = i + a * smooth_zn + b * smooth_az + c * smooth_elevation

        i, a, b, c = x_model
        x_offset = i + a * smooth_zn + b * smooth_az + c * smooth_elevation

        # Calculate updated coordinates
        easting = easting + 30 * x_offset
        northing = northing - 30 * y_offset

        zone, direction = utm_zone(longitude, latitude)
        longitude, latitude = utm2dd(easting, northing, zone, direction)

        #Recalculate elevation with new coordinates
        logging.info('Rebuilding DEM')
        elevation = dem_generate(longitude, latitude, elev_dir, temp_dir)

    loc_export(loc_file, longitude, latitude, elevation)

    # Generate remaining observable layers
    slope, aspect = slope_aspect(elevation, temp_dir)
    cosine_i = calc_cosine_i(np.radians(solar_zn), np.radians(solar_az),
                             np.radians(slope), np.radians(aspect))
    rel_az = np.radians(solar_az - sensor_az)
    phase = np.arccos(np.cos(np.radians(solar_zn))) * np.cos(
        np.radians(solar_zn))
    phase += np.sin(np.radians(solar_zn)) * np.sin(
        np.radians(solar_zn)) * np.cos(rel_az)

    utc_time = (lines / (l1b_file.RasterYSize) *
                (end_time - start_time).seconds) / 60 / 60
    utc_time += start_time.hour + start_time.minute / 60
    utc_time = utc_time[:, 85:]

    obs_export(obs_file, path, sensor_az, sensor_zn, solar_az, solar_zn, phase,
               slope, aspect, cosine_i, utc_time)

    if proj:
        #Create new projector with corrected coordinates
        new_coords = np.concatenate([
            np.expand_dims(easting.flatten(), axis=1),
            np.expand_dims(northing.flatten(), axis=1)
        ],
                                    axis=1)

        project = Projector()
        project.create_tree(new_coords, easting.shape)
        project.query_tree(easting.min() - 100, northing.max() + 100, 30)

        blocksize = int(res / 30)
        map_info = [
            'UTM', 1, 1,
            easting.min() - 100,
            northing.max() + 100, res, res, zone, direction, 'WGS-84',
            'units=Meters'
        ]
        out_cols = int(blocksize * (project.output_shape[1] // blocksize))
        out_lines = int(blocksize * (project.output_shape[0] // blocksize))

        logging.info('Georeferencing datasets')
        for file in ['rdn', 'loc', 'obs']:
            logging.info(file)
            input_name = '%sDESIS_%s_%s' % (temp_dir, base_name, file)
            hy_obj = ht.HyTools()
            hy_obj.read_file(input_name, 'envi')
            iterator = hy_obj.iterate(by='band')

            out_header = hy_obj.get_header()
            out_header['lines'] = project.output_shape[0] // blocksize
            out_header['samples'] = project.output_shape[1] // blocksize
            out_header['data ignore value'] = -9999
            out_header['map info'] = map_info

            output_name = '%sDESIS_%s_%s_prj' % (out_dir, base_name, file)
            writer = WriteENVI(output_name, out_header)

            while not iterator.complete:
                band = project.project_band(iterator.read_next(), -9999)
                band[band == -9999] = np.nan
                band = np.nanmean(view_as_blocks(band[:out_lines, :out_cols],
                                                 (blocksize, blocksize)),
                                  axis=(2, 3))
                if file == 'rdn':
                    band[band < 0] = 0
                band[np.isnan(band)] = -9999
                writer.write_band(band, iterator.current_band)
    logging.info('Deleting temporary files')
    shutil.rmtree(temp_dir)
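
A hedged driver sketch for the DESIS L1B converter (paths and the scene identifier are placeholders; the zip name must start with the 14-character 'DESIS-HSI-L1B-' prefix assumed by the base-name slicing):

l1b_process('/data/raw/DESIS-HSI-L1B-SCENEID.zip',
            out_dir='/data/output',
            temp_dir='/data/temp',
            elev_dir='https://copernicus-dem-30m.s3.amazonaws.com/',
            match=None,   # set to a truthy value to enable Landsat-based image matching
            proj=True,
            res=30)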
Example #6
def l1c_process(l1c_zip, out_dir, temp_dir, elev_dir):
    '''
     This function exports three files:
         *_rad* : Merged and optionally shift corrected radiance cube
         *_obs* : Observables file in the format of JPL obs files:
                 1. Pathlength (m)
                 2. To-sensor view azimuth angle (degrees)
                 3. To-sensor view zenith angle (degrees)
                 4. To-sun azimuth angle (degrees)
                 5. To-sun zenith angle (degrees)
                 6. Phase
                 7. Slope (Degrees)
                 8. Aspect (Degrees)
                 9. Cosine i
                 10. UTC decimal hours
         *_loc* : Location file in the following format:
                 1. Longitude (decimal degrees)
                 2. Latitude (decimal degrees)
                 3. Elevation (m)

     l1c_zip (str): L1C zipped radiance data product path
     out_dir (str): Output directory of ENVI datasets
     temp_dir (str): Temporary directory for intermediate files
     elev_dir (str): Directory of zipped Copernicus elevation tiles or URL to AWS Copernicus data
                    ex : 'https://copernicus-dem-30m.s3.amazonaws.com/'
    '''

    base_name = os.path.basename(l1c_zip)[14:-4]

    out_dir = '%s/DESIS_%s/' % (out_dir, base_name)
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    temp_dir = '%s/DESIS_%s/' % (temp_dir, base_name)
    if not os.path.isdir(temp_dir):
        os.mkdir(temp_dir)

    zip_base = os.path.basename(l1c_zip)
    logging.info('Unzipping %s' % zip_base)
    with zipfile.ZipFile(l1c_zip, 'r') as zipped:
        zipped.extractall(temp_dir)

    l1c_file = gdal.Open('%s/DESIS-HSI-L1C-%s-SPECTRAL_IMAGE.tif' %
                         (temp_dir, base_name))

    # Parse relevant metadata from the XML file; assumes metadata are in the same directory as the images
    tree = ET.parse('%s/DESIS-HSI-L1C-%s-METADATA.xml' % (temp_dir, base_name))
    root = tree.getroot()
    specific = root[3]
    band_meta = {}
    for item in specific.findall('bandCharacterisation'):
        for band in item:
            for meta in band:
                if meta.tag not in band_meta.keys():
                    band_meta[meta.tag] = []
                string = str(meta.text.encode('utf8'))
                string = string.replace('\\n', ' ')
                string = string.replace('b', ' ')
                string = string.replace("'", ' ')
                values = string.split(',')
                values = [float(x) for x in values]
                if len(values) == 1:
                    values = values[0]
                band_meta[meta.tag].append(values)
    offset = np.array(band_meta['offsetOfBand'])
    gain = np.array(band_meta['gainOfBand'])
    waves = np.array(band_meta['wavelengthCenterOfBand'])
    fwhm = np.array(band_meta['wavelengthWidthOfBand'])
    response = np.array(band_meta['response'])
    response_waves = np.array(band_meta['wavelengths'])

    # Fit a Gaussian to the response to determine center wavelength and FWHM
    opt_waves = []
    opt_fwhm = []
    for i, wave in enumerate(waves):
        popt, pcov = curve_fit(gaussian, response_waves[i],
                               np.array(response[i]) / max(response[i]),
                               [waves[i], fwhm[i]])
        opt_waves.append(popt[0])
        opt_fwhm.append(popt[1])

    # Get acquisition start and end time
    base = root[2]
    time_str = base.findall('temporalCoverage')[0].findall('startTime')[0].text
    time_str = time_str.replace('T', ' ').replace('Z', '')
    start_time = dt.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
    start_time = start_time.replace(tzinfo=dt.timezone.utc)

    time_str = base.findall('temporalCoverage')[0].findall('endTime')[0].text
    time_str = time_str.replace('T', ' ').replace('Z', '')
    end_time = dt.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
    end_time = end_time.replace(tzinfo=dt.timezone.utc)
    date = dt.datetime.strftime(start_time, "%Y%m%d")

    l1c_band = l1c_file.ReadAsArray().mean(axis=0)

    # Get ISS altitude
    altitude_m = float(base.findall('altitudeCoverage')[0].text)

    raster = l1c_file.ReadAsArray()
    mask = raster[1].astype(float)
    mask = mask == mask[0][0]

    rad_dict = envi_header_dict()
    rad_dict['lines'] = l1c_file.RasterYSize
    rad_dict['samples'] = l1c_file.RasterXSize
    rad_dict['bands'] = len(waves) - 1
    rad_dict['wavelength'] = opt_waves[1:]
    rad_dict['fwhm'] = opt_fwhm[1:]
    rad_dict['interleave'] = 'bil'
    rad_dict['data type'] = 4
    rad_dict['wavelength units'] = "nanometers"
    rad_dict['byte order'] = 0
    rad_dict['data ignore value'] = -9999
    rad_dict['default bands'] = [
        np.argmin(np.abs(waves - 1660)),
        np.argmin(np.abs(waves - 850)),
        np.argmin(np.abs(waves - 560))
    ]
    ulx, pixel_size, a, uly, b, c = l1c_file.GetGeoTransform()
    projection = pyproj.Proj(l1c_file.GetProjection())
    zone = int(projection.crs.utm_zone[:-1])
    direction = projection.crs.utm_zone[-1]
    map_info = [
        'UTM', 1, 1, ulx, uly, pixel_size, pixel_size, zone, direction,
        'WGS-84', 'units=Meters'
    ]
    rad_dict['map info'] = map_info
    rad_file = '%sDESIS_%s_rdn_prj' % (out_dir, base_name)
    loc_file = '%sDESIS_%s_loc_prj' % (out_dir, base_name)
    obs_file = '%sDESIS_%s_obs_prj' % (out_dir, base_name)

    writer = WriteENVI(rad_file, rad_dict)

    #Write VNIR cube
    logging.info('Exporting radiance data')
    for line_num in range(l1c_file.RasterYSize):
        line = raster[:, line_num, :].astype(float)
        line = line * gain[:, np.newaxis] + offset[:, np.newaxis]
        line = line[1:, :].T
        line[mask[line_num]] = -9999
        writer.write_line(line, line_num)
    del raster

    # Location datacube
    ###########################################################################
    lines, columns = np.indices((l1c_file.RasterYSize, l1c_file.RasterXSize))
    easting = ulx + columns * pixel_size
    northing = uly - lines * pixel_size

    longitude, latitude = utm2dd(easting, northing, zone, direction)
    solar_az = solar.get_azimuth(latitude, longitude, start_time)
    solar_zn = 90 - solar.get_altitude(latitude, longitude, start_time)
    solar_az[mask] = -9999
    solar_zn[mask] = -9999

    #Create elevation raster
    elevation = dem_generate(longitude, latitude, elev_dir, temp_dir)
    elevation[mask] = -9999
    longitude[mask] = -9999
    latitude[mask] = -9999

    loc_header = envi_header_dict()
    loc_header['lines'] = l1c_file.RasterYSize
    loc_header['samples'] = l1c_file.RasterXSize
    loc_header['data ignore value'] = -9999
    loc_header['bands'] = 3
    loc_header['interleave'] = 'bil'
    loc_header['data type'] = 4
    loc_header['band_names'] = ['Longitude', 'Latitude', 'Elevation']
    loc_header['byte order'] = 0
    loc_header['map info'] = map_info

    writer = WriteENVI(loc_file, loc_header)
    writer.write_band(longitude, 0)
    writer.write_band(latitude, 1)
    writer.write_band(elevation, 2)

    # Observables datacube
    ###########################################################################
    # Calculate sensor geometry
    sensor_az = np.ones(easting.shape) * float(
        specific.findall('sceneAzimuthAngle')[0].text)
    sensor_zn = np.ones(easting.shape) * float(
        specific.findall('sceneIncidenceAngle')[0].text)
    sensor_az[mask] = -9999
    sensor_zn[mask] = -9999

    # Generate remaining observable layers
    slope, aspect = slope_aspect(elevation, temp_dir)
    cosine_i = calc_cosine_i(np.radians(solar_zn), np.radians(solar_az),
                             np.radians(slope), np.radians(aspect))
    rel_az = np.radians(solar_az - sensor_az)
    phase = np.arccos(np.cos(np.radians(solar_zn))) * np.cos(
        np.radians(solar_zn))
    phase += np.sin(np.radians(solar_zn)) * np.sin(
        np.radians(solar_zn)) * np.cos(rel_az)

    utc_time = ((end_time - start_time).seconds) / 60 / 60
    utc_time += start_time.hour + start_time.minute / 60
    utc_time *= np.ones(easting.shape)

    cosine_i[mask] = -9999
    rel_az[mask] = -9999
    phase[mask] = -9999
    utc_time[mask] = -9999
    # Approximation: pathlength estimated as sensor altitude minus ground elevation
    pathlength = altitude_m - elevation
    pathlength[mask] = -9999

    obs_header = envi_header_dict()
    obs_header['lines'] = l1c_file.RasterYSize
    obs_header['samples'] = l1c_file.RasterXSize
    obs_header['data ignore value'] = -9999
    obs_header['bands'] = 10
    obs_header['interleave'] = 'bil'
    obs_header['data type'] = 4
    obs_header['byte order'] = 0
    obs_header['band_names'] = [
        'path length', 'to-sensor azimuth', 'to-sensor zenith',
        'to-sun azimuth', 'to-sun zenith', 'phase', 'slope', 'aspect',
        'cosine i', 'UTC time'
    ]
    obs_header['map info'] = map_info

    writer = WriteENVI(obs_file, obs_header)
    writer.write_band(pathlength, 0)
    writer.write_band(sensor_az, 1)
    writer.write_band(sensor_zn, 2)
    writer.write_band(solar_az, 3)
    writer.write_band(solar_zn, 4)
    writer.write_band(phase, 5)
    writer.write_band(slope, 6)
    writer.write_band(aspect, 7)
    writer.write_band(cosine_i, 8)
    writer.write_band(utc_time, 9)

    logging.info('Deleting temporary files')
    shutil.rmtree(temp_dir)
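
A hedged driver sketch for the DESIS L1C converter (paths and the scene identifier are placeholders; the zip name must start with the 'DESIS-HSI-L1C-' prefix assumed by the base-name slicing):

l1c_process('/data/raw/DESIS-HSI-L1C-SCENEID.zip',
            out_dir='/data/output',
            temp_dir='/data/temp',
            elev_dir='https://copernicus-dem-30m.s3.amazonaws.com/')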
Example #7
def h5_radiance_to_envi(filename, resolution=1):
    '''Convert a NEON HDF radiance file to an ENVI formatted
    image along with observables and location data cubes

    TODO: Recalculate terrain azimuth, provided product may be
    incorrect

    Args:
        filename (str): Path to HDF file.
        resolution (int, optional): Output image resolution. Defaults to 1.

    Returns:
        None.

    '''

    # Load HDF file
    hdf_obj = h5py.File(filename, 'r')

    key = list(hdf_obj.keys())[0]
    rad_dec = hdf_obj[key]['Radiance']['RadianceDecimalPart']
    rad_int = hdf_obj[key]['Radiance']['RadianceIntegerPart']
    obs = hdf_obj[key]['Radiance']['Metadata']['Ancillary_Rasters']['OBS_Data']
    igm = hdf_obj[key]['Radiance']['Metadata']['Ancillary_Rasters']['IGM_Data']

    wavelengths = hdf_obj[key]['Radiance']['Metadata']['Spectral_Data'][
        'Wavelength'][:].tolist()
    fwhm = hdf_obj[key]['Radiance']['Metadata']['Spectral_Data'][
        'FWHM'][:].tolist()

    map_info = hdf_obj[key]['Radiance']['Metadata']['Coordinate_System'][
        'Map_Info'][()].decode("utf-8").split(',')
    epsg = hdf_obj[key]['Radiance']['Metadata']['Coordinate_System'][
        'EPSG Code'][()].decode("utf-8")

    new_lines = rad_dec.shape[0] // resolution
    new_cols = rad_dec.shape[1] // resolution

    map_info[5] = resolution
    map_info[6] = resolution

    map_info = [str(info).strip() for info in map_info]

    # Export integer and decimal radiance components
    # to temporary ENVI files
    rad_dict = envi_header_dict()
    rad_dict['lines'] = rad_dec.shape[0]
    rad_dict['samples'] = rad_dec.shape[1]
    rad_dict['bands'] = rad_dec.shape[2]
    rad_dict['interleave'] = 'bsq'
    rad_dict['data type'] = 12
    rad_dict['byte order'] = 0
    dec_temp = filename.replace('radiance.h5', 'rad_dec')
    writer = WriteENVI(dec_temp, rad_dict)
    writer.write_chunk(rad_dec, 0, 0)

    int_temp = filename.replace('radiance.h5', 'rad_int')
    writer = WriteENVI(int_temp, rad_dict)
    writer.write_chunk(rad_int, 0, 0)

    int_obj = ht.HyTools()
    int_obj.read_file(int_temp, 'envi')

    dec_obj = ht.HyTools()
    dec_obj.read_file(dec_temp, 'envi')

    # Export radiance
    ##################
    rad_dict = envi_header_dict()
    rad_dict['lines'] = new_lines
    rad_dict['samples'] = new_cols
    rad_dict['bands'] = rad_dec.shape[2]
    rad_dict['wavelength'] = wavelengths
    rad_dict['fwhm'] = fwhm
    rad_dict['interleave'] = 'bil'
    rad_dict['data type'] = 4
    rad_dict['wavelength units'] = "nanometers"
    rad_dict['byte order'] = 0
    rad_dict['data ignore value'] = -9999
    rad_dict['map info'] = map_info

    output_name = filename.replace('radiance.h5', 'rad')
    writer = WriteENVI(output_name, rad_dict)

    for band_num in range(rad_dict['bands']):
        print(band_num)
        band_int = int_obj.get_band(band_num).astype(float)
        band_dec = dec_obj.get_band(band_num) / 50000
        band = band_int + band_dec
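        # Worked example of the split encoding (hypothetical values): rad_int = 12
        # and rad_dec = 34567 reconstruct to 12 + 34567/50000 = 12.69134.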
        band[band_int == 255] = np.nan
        band = band[:new_lines * resolution, :new_cols * resolution]
        band = view_as_blocks(band, (resolution, resolution)).mean(axis=(2, 3))
        band[np.isnan(band)] = -9999
        writer.write_band(band, band_num)

    os.remove(dec_temp)
    os.remove(int_temp)

    # Export observables
    ####################
    obs_dict = envi_header_dict()
    obs_dict['band_names'] = [
        'path length', 'to-sensor azimuth', 'to-sensor zenith',
        'to-sun azimuth', 'to-sun zenith', 'phase', 'slope', 'aspect',
        'cosine i', 'UTC time'
    ]
    obs_dict['data type'] = 4
    obs_dict['lines'] = new_lines
    obs_dict['samples'] = new_cols
    obs_dict['bands'] = 10
    obs_dict['fwhm'] = fwhm
    obs_dict['interleave'] = 'bil'
    obs_dict['data type'] = 4
    obs_dict['byte order'] = 0
    obs_dict['data ignore value'] = -9999
    obs_dict['map info'] = map_info

    output_name = filename.replace('radiance.h5', 'obs')
    writer = WriteENVI(output_name, obs_dict)

    for band_num in range(obs_dict['bands']):
        print(band_num)
        band = obs[:, :, band_num]
        band[band == -9999] = np.nan
        band = band[:new_lines * resolution, :new_cols * resolution]
        band = view_as_blocks(band, (resolution, resolution)).mean(axis=(2, 3))
        band[np.isnan(band)] = -9999
        writer.write_band(band, band_num)

    # Export location datacube (lon,lat,elevation)
    ##############################################
    loc_dict = envi_header_dict()
    loc_dict['band_names'] = ['longitude', 'latitude', 'elevation']
    loc_dict['data type'] = 4
    loc_dict['lines'] = new_lines
    loc_dict['samples'] = new_cols
    loc_dict['bands'] = 3
    loc_dict['fwhm'] = fwhm
    loc_dict['interleave'] = 'bil'
    loc_dict['data type'] = 4
    loc_dict['byte order'] = 0
    loc_dict['data ignore value'] = -9999
    loc_dict['map info'] = map_info

    output_name = filename.replace('radiance.h5', 'loc')
    writer = WriteENVI(output_name, loc_dict)

    in_proj = pyproj.Proj("+init=EPSG:%s" % epsg)
    out_proj = pyproj.Proj("+init=EPSG:4326")

    longitude, latitude = pyproj.transform(in_proj, out_proj, igm[:, :, 0],
                                           igm[:, :, 1])

    elevation = igm[:, :, 2]
    mask = elevation == -9999

    for band_num, band in enumerate([longitude, latitude, elevation]):
        print(band_num)
        band[mask] = np.nan
        band = band[:new_lines * resolution, :new_cols * resolution]
        band = view_as_blocks(band, (resolution, resolution)).mean(axis=(2, 3))
        band[np.isnan(band)] = -9999
        writer.write_band(band, band_num)

    os.remove(filename)
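
A hedged usage sketch (the file path is a placeholder; the name must end in 'radiance.h5' because output names are derived by string replacement, and the function deletes the input HDF file when it finishes):

h5_radiance_to_envi('/data/NEON_site_20200801_radiance.h5', resolution=5)  # averages 5 x 5 pixel blocks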
Example #8
            ax.set_xlabel("Wavelength (nm)", fontsize=15)
            ax.set_ylabel("Reflectance", fontsize=15)
            ax.set_xlim(375, 2500)

            for direction in ['left', 'right', 'top', 'bottom']:
                ax.spines[direction].set_linewidth(1.5)

            if export_figures:
                plt.savefig("%s/PRISMA_%s_rfl_optimized.png" %
                            (figure_dir, base_name),
                            bbox_inches='tight',
                            dpi=500)
                plt.show()
                plt.close()

    shift_header = envi_header_dict()
    shift_header['lines'] = shift_surf.shape[0]
    shift_header['samples'] = shift_surf.shape[1]
    shift_header['bands'] = 1
    shift_header['interleave'] = 'bsq'
    shift_header['data type'] = 4
    shift_header['byte order'] = 0
    shift_header['band_names'] = ['wavelength shift(nm)']
    shift_file = "%s/sister/data/prisma/wavelength_shift/PRISMA_%s_wavelength_shift_surface" % (
        home, base_name)
    writer = WriteENVI(shift_file, shift_header)
    writer.write_band(shift_surf, 0)

    example_waves = [500, 750, 850, 1200, 1660, 2200]
    example_bands = [radiance.wave_to_band(wave) for wave in example_waves]