Example #1
def retrieve_ssa(ssa_guess, pixel_index):
    # Skip the retrieval if the pixel quantities aren't good
    answer = np.zeros(n_wavelengths)
    if pixel_od[pixel_index] < 5 or solar_zenith_angle[pixel_index] > 72 or \
            emission_angle[pixel_index] > 72:
        answer[:] = np.nan
        print(f'skipping pixel {pixel_index}')
        return pixel_index, answer

    # Construct the EoS profiles
    # Use T profile from Kass et al 2019
    eos_file[:, 2] = np.array(
        [230, 230, 230, 230, 230, 230, 230, 230, 230, 230,
         230, 230, 230, 230, 230, 228, 226, 224, 222, 220,
         214, 208, 202, 196, 190, 186, 182, 178, 174, 170,
         166, 164, 158, 154, 150, 150, 150, 150, 150, 150,
         150])

    # Force P_surface to be 6.1 mbar
    eos_file[:, 1] *= 610 / eos_file[0, 1]

    # 3.71 m/s^2 is Martian gravity; 7.3e-26 kg is the mass of a CO2 molecule
    model_eos = eos_from_array(eos_file, z_boundaries, 3.71, 7.3e-26)
    temperatures = model_eos.temperature_boundaries
    h_lyr = model_eos.scale_height_boundaries

    # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # Construct the mutable aerosol properties
    # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # TODO: use real profile
    conrath_profile = Conrath(model_eos, 10, 0.01)
    # TODO: what is this really?
    p_sizes = np.linspace(1.5, 1.5, num=len(conrath_profile.profile))

    # Make Rayleigh stuff
    rco2 = RayleighCo2(short_wav[pixel_index, :], model_eos,
                       n_moments)

    def fit_ssa(guess, wav_index: int):
        # Reject guesses outside the physical range [0, 1] with a large cost
        if not 0 <= guess <= 1:
            return 9999999

        # Take care of surface variables
        albedo = hapke[wav_index].albedo
        lamber = hapke[wav_index].lambertian
        rhou = hapke[wav_index].rhou
        rhoq = hapke[wav_index].rhoq
        bemst = hapke[wav_index].bemst
        emust = hapke[wav_index].emust
        rho_accurate = hapke[wav_index].rho_accurate

        cext = dust_file.array['primary'].data[:, :, 0]
        csca = dust_file.array['primary'].data[:, :, 1]
        # Setting c_sca = guess * c_ext forces the single-scattering albedo
        # (c_sca / c_ext) to the guessed value in the first two wavelength bins
        csca[:, 0:2] = guess * cext[:, 0:2]
        c_ext = ForwardScatteringProperty(
            cext,
            particle_size_grid=sizes,
            wavelength_grid=wavs)
        c_sca = ForwardScatteringProperty(
            csca,
            particle_size_grid=sizes,
            wavelength_grid=wavs)

        dust_properties = ForwardScatteringPropertyCollection()
        dust_properties.add_property(c_ext, 'c_extinction')
        dust_properties.add_property(c_sca, 'c_scattering')

        # Use the Curiosity OD
        dust_col = Column(dust_properties, model_eos,
                          conrath_profile.profile, p_sizes,
                          short_wav[pixel_index, :], 0.88, pixel_od[pixel_index],
                          dust_phsfn)

        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Make the model
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        model = ModelAtmosphere()
        # Make tuples of (dtauc, ssalb, pmom) for each constituent
        dust_info = (
            dust_col.total_optical_depth, dust_col.scattering_optical_depth,
            dust_col.scattering_optical_depth * dust_col.phase_function)
        rayleigh_info = (
            rco2.scattering_optical_depths, rco2.scattering_optical_depths,
            rco2.phase_function)  # This works since scat OD = total OD for Rayleigh

        # Add dust and Rayleigh scattering to the model
        model.add_constituent(dust_info)
        model.add_constituent(rayleigh_info)

        model.compute_model()

        optical_depths = model.hyperspectral_total_optical_depths[:, wav_index]
        ssa = model.hyperspectral_total_single_scattering_albedos[:, wav_index]
        polynomial_moments = model.hyperspectral_legendre_moments[:, :, wav_index]

        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Make the output arrays
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        oa = OutputArrays(cp)
        albmed = oa.albedo_medium
        flup = oa.diffuse_up_flux
        rfldn = oa.diffuse_down_flux
        rfldir = oa.direct_beam_flux
        dfdt = oa.flux_divergence
        uu = oa.intensity
        uavg = oa.mean_intensity
        trnmed = oa.transmissivity_medium

        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Optical depth output structure
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        utau = UserLevel(cp, mb).optical_depth_output

        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Run the model
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        rfldir, rfldn, flup, dfdt, uavg, uu, albmed, trnmed = \
            disort.disort(usrang, usrtau, ibcnd, onlyfl, prnt, plank, lamber,
                          deltamplus, dopseudosphere, optical_depths, ssa,
                          polynomial_moments, temperatures, low_wavenumber,
                          high_wavenumber, utau,
                          mu0[pixel_index],
                          phi0[pixel_index],
                          mu[pixel_index],
                          phi[pixel_index], fbeam, fisot,
                          albedo, btemp, ttemp, temis, radius, h_lyr, rhoq, rhou,
                          rho_accurate, bemst, emust, accur, header, rfldir,
                          rfldn, flup, dfdt, uavg, uu, albmed, trnmed)
        return (uu[0, 0, 0] - reflectance[pixel_index, wav_index])**2

    # Fit the single-scattering albedo at each wavelength independently
    for wavelength in range(n_wavelengths):
        fitted_ssa = optimize.minimize(fit_ssa, np.array([ssa_guess]),
                                       args=(wavelength,),
                                       method='Nelder-Mead').x
        answer[wavelength] = fitted_ssa[0]
    return pixel_index, answer
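
A possible driver for retrieve_ssa (a sketch, not part of the original example): because the function returns its pixel_index along with the retrieved spectrum, it maps cleanly over pixels with a process pool and the results can be reassembled in order. It assumes ssa_guess, n_pixels and n_wavelengths exist at module level, and that the other module-level inputs (pixel_od, reflectance, etc.) are available in each worker, e.g. via fork.

from multiprocessing import Pool

retrieved_ssa = np.full((n_pixels, n_wavelengths), np.nan)
with Pool() as pool:
    results = pool.starmap(retrieve_ssa,
                           [(ssa_guess, pixel) for pixel in range(n_pixels)])
# The returned pixel index keeps the output aligned even if workers finish out of order
for pixel_index, spectrum in results:
    retrieved_ssa[pixel_index] = spectrum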
Example #2
# Read in external files
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Read in the atmosphere file
# This hack sucks but I figure we need a quick resolution
project_path = os.path.abspath(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
data_path = os.path.join(project_path, 'data')
#atmFile = ExternalFile(os.path.join(data_path, 'planets/mars/aux/mars_atm.npy'))
atmFile = ExternalFile(os.path.join(project_path, 'tests/marsatm.npy'))
# Define the boundaries I want to use. Note that I'm sticking with DISORT's
# convention of starting from the top of the atmosphere
z_boundaries = np.linspace(80, 0, num=20)
# New: eos_from_array is a function that returns a custom class---to help you out
model_eos = eos_from_array(atmFile.array, z_boundaries)
temperatures = model_eos.temperature_boundaries  # Define an oddball variable for use in the disort call

# Read in a 3D dust file
dustFile = ExternalFile(
    os.path.join(data_path, 'planets/mars/aux/dust_properties.fits'))
wavs = dustFile.array['wavelengths'].data
sizes = dustFile.array['particle_sizes'].data

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Construct aerosol/model properties. These will almost certainly change
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Add the columns from dustFile to dust_properties
# Note: disort_multi ships with aerosol_dust.dat at 1.5 microns (index 10) of
# the forward scattering file. Also note that this has 14 particle sizes, not
# 13 like the phase function array I have
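
Example #2 stops before the step its last comment describes. Below is a sketch of how those columns could be wrapped, following the pattern in Example #1; the 'primary' extension, the column indices (0 = extinction, 1 = scattering) and the property names are taken from Example #1 and are assumptions here, not something this snippet confirms.

c_ext = ForwardScatteringProperty(
    dustFile.array['primary'].data[:, :, 0],
    particle_size_grid=sizes,
    wavelength_grid=wavs)
c_sca = ForwardScatteringProperty(
    dustFile.array['primary'].data[:, :, 1],
    particle_size_grid=sizes,
    wavelength_grid=wavs)

dust_properties = ForwardScatteringPropertyCollection()
dust_properties.add_property(c_ext, 'c_extinction')
dust_properties.add_property(c_sca, 'c_scattering')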
Example #3
aero_path = '/home/kyle/repos/pyuvs-rt/data'
#eos_file = ExternalFile(os.path.join(aero_path, 'marsatm.npy'))
eos_file = np.load(os.path.join(aero_path, 'marsatm.npy'))

# Read in the dust scattering properties file
dust_file = ExternalFile(os.path.join(aero_path, 'dust_properties.fits'))

# Read in the dust phase function file
dust_phsfn_file = ExternalFile(os.path.join(aero_path,
                                            'dust_phase_function.fits'))

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Make the equation of state variables on a custom grid
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
z_boundaries = np.linspace(80, 0, num=20)    # Define the boundaries to use
# 3.71 m/s^2 is Martian gravity; 7.3e-26 kg is the mass of a CO2 molecule
model_eos = eos_from_array(eos_file, z_boundaries, 3.71, 7.3e-26)
temperatures = model_eos.temperature_boundaries
h_lyr = model_eos.scale_height_boundaries

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Construct the immutable aerosol properties
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
wavs = dust_file.array['wavelengths'].data
sizes = dust_file.array['particle_sizes'].data

# Hack my code like a scientist: overwrite the second point of the
# scattering properties wavelength grid
wavs[1] = 0.439

# Apply the same hack to the phase function wavelength grid
dustwavs = dust_phsfn_file.array['wavelengths'].data
dustwavs[1] = 0.439
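
Example #1 above picks these immutable pieces up inside its per-pixel retrieval. As a rough sketch of the step that typically follows, copied from Example #1 rather than from this snippet; pixel_wavelengths and n_moments are placeholders for quantities defined elsewhere.

conrath_profile = Conrath(model_eos, 10, 0.01)  # Example #1 flags this as a placeholder profile
p_sizes = np.linspace(1.5, 1.5, num=len(conrath_profile.profile))
rco2 = RayleighCo2(pixel_wavelengths, model_eos, n_moments)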