Example #1
def plot_all_bins_occultation(ax,
                              hdf5_filename,
                              spectra_id_all_bins,
                              use_file_nu=False,
                              instrument_temperature=-999.):
    """plot the selected spectra from all the bins of one hdf5 file.
    If use_file_nu then take the wavenumbers from the file,
    else if an instrument temperature to recalculate nu grid.
    Output = waenumber grid"""

    hdf5_filepath, hdf5_file = getFile(hdf5_filename, "hdf5_level_1p0a", 0)

    #calibrate transmittances for all 4 bins
    obsDict0 = getLevel1Data(
        hdf5_file, hdf5_filename, 0, silent=True,
        top_of_atmosphere=60.0)  #use mean method, returns dictionary
    obsDict1 = getLevel1Data(
        hdf5_file, hdf5_filename, 1, silent=True,
        top_of_atmosphere=60.0)  #use mean method, returns dictionary
    obsDict2 = getLevel1Data(
        hdf5_file, hdf5_filename, 2, silent=True,
        top_of_atmosphere=60.0)  #use mean method, returns dictionary
    obsDict3 = getLevel1Data(
        hdf5_file, hdf5_filename, 3, silent=True,
        top_of_atmosphere=60.0)  #use mean method, returns dictionary

    #cat all data and sort by altitude
    yAll = np.concatenate((obsDict0["y_mean"], obsDict1["y_mean"],
                           obsDict2["y_mean"], obsDict3["y_mean"]))
    altAll = np.concatenate(
        (obsDict0["alt"], obsDict1["alt"], obsDict2["alt"], obsDict3["alt"]))
    sortIndices = altAll.argsort()
    altSorted = altAll[sortIndices]
    ySorted = yAll[sortIndices, :]

    #use wavenumber X from file, or remake from instrument temperature?
    #replace with line detection
    if use_file_nu:
        #       nu = obsDict0["x"][0, :] - 0.1
        nu_obs = obsDict0["x"][0, :]
    else:
        diffraction_order = int(hdf5_filename.split("_")[-1])
        pixels = np.arange(320)
        nu_obs = nu_mp(diffraction_order, pixels, instrument_temperature)

    for spectrum_index in spectra_id_all_bins:
        yBaseline = baseline_als(ySorted[spectrum_index, :])
        yCorrected = ySorted[spectrum_index, :] / yBaseline

        ax.plot(nu_obs,
                yCorrected,
                label="%0.1fkm" % altSorted[spectrum_index])

    return nu_obs
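
Every example on this page relies on baseline_als to estimate the spectral continuum before dividing it out, but the function itself is not listed. Below is a minimal sketch of the standard asymmetric least squares smoother (Eilers & Boelens, 2005) that such a helper typically implements; the signature and the default lam/p values are assumptions (Example #10 passes lam=500. explicitly), not the originals.

import numpy as np
from scipy import sparse
from scipy.sparse.linalg import spsolve

def baseline_als(y, lam=250.0, p=0.95, niter=10):
    """Asymmetric least squares baseline (assumed implementation).
    Points above the current baseline get weight p, points below get 1-p,
    so absorption dips are ignored and the result follows the continuum."""
    n = len(y)
    #second-difference operator for the smoothness penalty
    D = sparse.diags([1.0, -2.0, 1.0], [0, -1, -2], shape=(n, n - 2))
    w = np.ones(n)
    for _ in range(niter):
        W = sparse.spdiags(w, 0, n, n)
        Z = W + lam * D.dot(D.transpose())
        z = spsolve(Z.tocsc(), w * y)
        w = p * (y > z) + (1.0 - p) * (y < z)
    return z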
Example #2
def plot_single_bin_occultation(ax,
                                hdf5_filename,
                                spectra_id_all_bins,
                                use_file_nu=False,
                                instrument_temperature=-999.):
    """plot the selected spectra from a single bin of one hdf5 file.
    If use_file_nu then take the wavenumbers from the file,
    else if an instrument temperature to recalculate nu grid.
    Output = waenumber grid"""

    hdf5_filepath, hdf5_file = getFile(hdf5_filename, "hdf5_level_1p0a", 0)

    #find which spectrum indices in the source file correspond to those in the chosen bin
    bins = hdf5_file["Science/Bins"][:, 0]
    uniqueBins = sorted(list(set(bins)))
    binIndices = np.where(bins == uniqueBins[CHOSEN_BIN])[0]
    binIds = []
    for binId, binIndex in enumerate(binIndices):
        if binIndex in spectra_id_all_bins:
            binIds.append(binId)

    #get data for the chosen bin
    obsDict = getLevel1Data(
        hdf5_file,
        hdf5_filename,
        CHOSEN_BIN,
        silent=True,
        top_of_atmosphere=60.0)  #use mean method, returns dictionary

    #use wavenumber X from file, or remake from instrument temperature?
    #replace with line detection
    if use_file_nu:
        nu_obs = obsDict["x"][0, :]
    else:
        diffraction_order = int(hdf5_filename.split("_")[-1])
        pixels = np.arange(320)
        nu_obs = nu_mp(diffraction_order, pixels, instrument_temperature)

    #remove baseline continuum and plot
    for spectrumIndex in binIds:
        y_mean_baseline = baseline_als(obsDict["y_mean"][spectrumIndex, :])
        y_mean_corrected = obsDict["y_mean"][
            spectrumIndex, :] / y_mean_baseline

        ax.plot(nu_obs,
                y_mean_corrected,
                label="SO bin %i @ %0.1fkm " %
                (CHOSEN_BIN, obsDict["alt"][spectrumIndex]))

    return nu_obs
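
For context, a hypothetical call to the two plotting helpers above. The filename, spectrum indices and temperature are placeholders, CHOSEN_BIN must already be defined at module level, and the supporting functions (getFile, getLevel1Data, nu_mp, baseline_als) come from the original tools module.

import matplotlib.pyplot as plt

fig, ax = plt.subplots(figsize=(12, 6))
#hypothetical filename and spectrum indices
nu_obs = plot_all_bins_occultation(ax, "20180601_010203_1p0a_SO_A_E_134",
                                   [40, 41, 42, 43],
                                   use_file_nu=False,
                                   instrument_temperature=-5.0)
plot_single_bin_occultation(ax, "20180601_010203_1p0a_SO_A_E_134",
                            [40, 41, 42, 43],
                            use_file_nu=True)
ax.set_xlabel("Wavenumber cm-1")
ax.set_ylabel("Transmittance / continuum")
ax.legend()
plt.show()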
Example #3
            "No valid indices found. Attempt 3: Reducing signal cutoff to %0.1f and absorption signal to %0.1f"
            % (nadir_mean_signal_cutoff, minimum_signal_for_absorption))
        validIndices = np.where(
            np.nanmean(yBinnedNorm, axis=1) > nadir_mean_signal_cutoff)[0]

#plot raw values
fig1, (ax1a, ax1b) = plt.subplots(nrows=2, figsize=(FIG_X, FIG_Y), sharex=True)
for validIndex in validIndices:
    ax1a.plot(xIn, yBinnedNorm[validIndex, :], alpha=0.3)

#plot mean spectrum
mean_spectrum = np.nanmean(yBinnedNorm[validIndices, :], axis=0)
ax1a.plot(xIn, mean_spectrum, "k")

#plot baseline corrected spectra
mean_spectrum_baseline = baseline_als(
    mean_spectrum)  #find continuum of mean spectrum
ax1a.plot(xIn, mean_spectrum_baseline, "k--")

mean_corrected_spectrum = mean_spectrum / mean_spectrum_baseline
ax1b.plot(xIn, mean_corrected_spectrum, "r")

#do quadratic fit to find true absorption minima
std_corrected_spectrum = np.std(mean_corrected_spectrum)
abs_points = np.where((mean_corrected_spectrum <
                       (1.0 - std_corrected_spectrum * n_stds_for_absorption))
                      & (mean_spectrum > minimum_signal_for_absorption))[0]
ax1b.scatter(xIn[abs_points], mean_corrected_spectrum[abs_points], c="r", s=10)

#find pixel indices containing absorptions in nadir data
#split indices for different absorptions into different lists
previous_point = abs_points[0] - 1
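
The snippet above is cut off just as it starts grouping abs_points into separate absorption features. A minimal sketch of that grouping step, assuming nothing about the original loop beyond its comment (split wherever consecutive pixel indices are not adjacent):

def split_contiguous(indices):
    """Split a sorted array of pixel indices into contiguous runs,
    one list per absorption feature."""
    runs = []
    current = [indices[0]]
    for idx in indices[1:]:
        if idx == current[-1] + 1:  #still the same absorption
            current.append(idx)
        else:                       #gap found: start a new feature
            runs.append(current)
            current = [idx]
    runs.append(current)
    return runs

#e.g. split_contiguous(abs_points) -> [[101, 102, 103], [187, 188], ...]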
Example #4
    frame_range = frame_ranges[regex.pattern]

    detector_data_selected = detector_data_reshaped[
        frame_range[0]:frame_range[1], :, :]

    absorption_minima = []
    detector_rows = []
    indices = []
    spectra_cont_removed = []
    i = 0
    for frame in detector_data_selected:
        for frame_index, spectrum in enumerate(frame):
            if spectrum[180] > signal_minimum:
                i += 1
                spectrum_cont = baseline_als(spectrum)
                spectra_cont_removed.append(spectrum / spectrum_cont)

                absorption_minimum = findAbsorptionMinimum(
                    spectrum / spectrum_cont, continuum_range)
                absorption_minima.append(absorption_minimum)
                detector_rows.append(frame_index)
                indices.append(i)

    ill_rows = sorted(list(set(detector_rows)))

    spectra_cont_removed = np.asarray(spectra_cont_removed, dtype=float)

    plt.figure(figsize=(FIG_X - 3.5, FIG_Y))
    # plt.scatter(absorption_minima, detector_rows, marker="o", c=indices, linewidth=0, alpha=0.5)
    plt.scatter(absorption_minima,
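
findAbsorptionMinimum is not defined on this page. A plausible sketch, assuming it locates the sub-pixel minimum of a continuum-removed absorption by fitting a parabola around the deepest point; the signature and the interpretation of continuum_range as a (start, end) pixel window are assumptions.

import numpy as np

def findAbsorptionMinimum(spectrum_cont_removed, continuum_range, half_width=3):
    """Return the sub-pixel position of the deepest absorption within
    the continuum_range pixel window (assumed behaviour)."""
    px_start, px_end = continuum_range
    window = np.arange(px_start, px_end)
    deepest = window[np.argmin(spectrum_cont_removed[window])]
    #quadratic fit around the deepest pixel; the vertex gives the minimum position
    fit_px = np.arange(deepest - half_width, deepest + half_width + 1)
    coeffs = np.polyfit(fit_px, spectrum_cont_removed[fit_px], 2)
    return -coeffs[1] / (2.0 * coeffs[0])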
Example #5
            if DATA_TYPE == "ground":
                if USE_CSL_TEMPERATURES: #overwrite temperature with one from external file
                    measurement_temperature = getExternalTemperatureReadings(measurement_time.decode(), CSL_TEMPERATURE_COLUMN)

            if DATA_TYPE == "inflight":
                if USE_TGO_TEMPERATURES: #overwrite temperature with one from external file
                    measurement_temperature = get_sql_spectrum_temperature(hdf5_file, chosen_order_index)


            order_data_dict[diffraction_order]["hdf5_filenames"].append(hdf5_filename)
            order_data_dict[diffraction_order]["measurement_temperatures"].append(measurement_temperature)
            order_data_dict[diffraction_order]["colour"].append(colours[int(measurement_temperature)+20])
            
            
            #remove continuum
            continuum = baseline_als(normalised_spectrum)
            order_data_dict[diffraction_order]["continuum_mean"].append(np.mean(continuum))
            order_data_dict[diffraction_order]["continuum_std"].append(np.std(continuum))

            absorption_spectrum = normalised_spectrum / continuum
            #normalise between 0 and 1
            absorption_spectrum = (absorption_spectrum - np.min(absorption_spectrum))/ (np.max(absorption_spectrum) - np.min(absorption_spectrum))
            order_data_dict[diffraction_order]["spectra"].append(absorption_spectrum)


        if len(chosen_order_indices) == 0:
            text = "AOTF frequency %0.0f kHz (order %i) %0.1fC not found in file %s" %(desired_aotf, diffraction_order, measurement_temperature, hdf5_filename)
            aotf_frequency_all = hdf5_file["Channel/AOTFFrequency"][...]
            diffraction_orders = np.asarray([findOrder("lno", aotf_frequency, silent=True) for aotf_frequency in aotf_frequency_all], dtype=float)
            text += " (%0.0f-%0.0fkHz; orders=%i-%i)" %(min(aotf_frequency_all), max(aotf_frequency_all), min(diffraction_orders), max(diffraction_orders))
            print(text)
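
The loop above assumes order_data_dict was initialised elsewhere. A minimal sketch of the structure it implies, with keys taken from the append calls (the use of defaultdict is an assumption):

from collections import defaultdict

#one entry per diffraction order, each holding parallel lists
order_data_dict = defaultdict(lambda: {
    "hdf5_filenames": [],
    "measurement_temperatures": [],
    "colour": [],
    "continuum_mean": [],
    "continuum_std": [],
    "spectra": [],
})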
Example #6
            y_mean = np.mean(detector_data_all[i, 12, 160:240])
            # y_std = np.std(detector_data_all[i, 12, 160:240])

            minima[i] = {
                "row_no": [],
                "min_pixel": [],
                "chisq": [],
                "colour": colours2[index]
            }

            for j in range(n_rows_raw):  #loop through detector rows

                if np.mean(detector_data_all[i, j, 160:240]) > 0.3 * y_mean:
                    spectrum = detector_data_all[i, j, :]

                    continuum = baseline_als(spectrum)
                    absorption = spectrum / continuum

                    ax1.plot(absorption,
                             color=colours[j],
                             label="i=%i, row=%i" % (i, row_no[j]))

                    # sav_gol = savgol_filter(spectrum, 9, 2)
                    # plt.plot(sav_gol, color=colours[j], linestyle=":", label="i=%i, row=%i" %(i,row_no[j]))

                    # oversampled = ss.resample(spectrum, 640)
                    # plt.plot(np.arange(0, 320, 0.5), oversampled, color=colours[j], linestyle="--", label="i=%i, row=%i" %(i,row_no[j]))

                    rel_indices = absorption_indices[orders[i]] - np.mean(
                        absorption_indices[orders[i]])
                    gaussian = fit_gaussian_absorption(
Example #7
            integration_time = float(integration_time_raw) / 1.0e3  #microseconds to seconds
            number_of_accumulations = float(number_of_accumulations_raw) / 2.0  #assume LNO nadir background subtraction is on
            n_px_rows = 1.0

            measurement_seconds = integration_time * number_of_accumulations
            #normalise to 1s integration time per pixel
            spectrum_counts = y_spectrum / measurement_seconds / n_px_rows
            label = "%s order %0.0fkHz %0.1fC" % (
                hdf5_filename[:15], aotf_frequency, measurement_temperature)

            #remove baseline
            y_baseline = baseline_als(
                spectrum_counts)  #find continuum of mean spectrum
            y_corrected = spectrum_counts / y_baseline
            ax1a.plot(pixels,
                      y_corrected,
                      color=colours[temperature_index],
                      label="%0.1fC" % measurement_temperature)
            ax2a.plot(pixels,
                      spectrum_counts,
                      "--",
                      color=colours[temperature_index],
                      alpha=0.7,
                      label="%0.1fC" % measurement_temperature)

            if solar_line:
                """find centres of solar lines. Spectral cal is approximate so first find solar line in data using approx cal"""
                #step1: find nearest pixel number where calculated px nu = real solar line nu
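
Both snippets above lean on line fitting that is not shown here: fit_gaussian_absorption in Example #6 and the solar-line centring steps in Example #7. A minimal sketch of the idea, with an assumed signature, using scipy.optimize.curve_fit on an inverted Gaussian:

import numpy as np
from scipy.optimize import curve_fit

def _inverted_gaussian(x, depth, centre, width, continuum):
    return continuum - depth * np.exp(-((x - centre) ** 2) / (2.0 * width ** 2))

def fit_gaussian_absorption(x, y):
    """Fit an inverted Gaussian to a continuum-removed absorption line and
    return the fitted curve and its centre (assumed signature)."""
    p0 = [1.0 - np.min(y), x[np.argmin(y)], (x[-1] - x[0]) / 4.0, 1.0]
    popt, _ = curve_fit(_inverted_gaussian, x, y, p0=p0)
    return _inverted_gaussian(x, *popt), popt[1]

#step 1 of the solar line search reduces to a nearest-pixel lookup, e.g.:
#nearest_px = np.argmin(np.abs(nu_obs - solar_line_nu))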
Example #8
    def process_channel_data(self, args):
        """make database containing info about all spectra in a channel for a particular observation type"""

        self.level = args.level
        self.command = args.command
        if args.silent:
            silent = True
        else:
            silent = False

        table_fields = obs_database_fields(self.level,
                                           bira_server=self.bira_server)
        table_name = self.level
        if args.regenerate:
            print("Deleting and regenerating table")
            self.check_if_table_exists(table_name)
            self.drop_table(table_name)
            self.new_table(table_name, table_fields)

        print("Getting file list")
        if args.regex:
            regex = re.compile(args.regex)
        else:
            if self.command == "lno_nadir":
                if self.level == "hdf5_level_1p0a":
                    regex = re.compile("20.*_LNO.*_D(P|F).*")
                else:
                    regex = re.compile("20.*_LNO.*_D.*")
            elif self.command == "so_occultation":
                regex = re.compile("20.*_SO.*_[IE].*")
            elif self.command == "uvis_nadir":
                regex = re.compile("20.*_UVIS.*_D")
            elif self.command == "uvis_occultation":
                regex = re.compile("20.*_UVIS.*_[IE]")

        beg_datetime = datetime.datetime.strptime(args.beg, ARG_FORMAT)
        end_datetime = datetime.datetime.strptime(args.end, ARG_FORMAT)

        _, hdf5Filenames, _ = make_filelist(regex,
                                            self.level,
                                            silent=silent,
                                            open_files=False)

        print("%i files found in directory" % len(hdf5Filenames))
        #make datetime from hdf5 filenames, find those that match the beg/end times
        hdf5_datetimes = [
            datetime.datetime.strptime(i[:15], HDF5_FILENAME_FORMAT)
            for i in hdf5Filenames
        ]

        #        hdf5_file_indices = [i for i, hdf5_datetime in enumerate(hdf5_datetimes) if beg_datetime < hdf5_datetime < end_datetime]
        matching_hdf5_filenames = [
            hdf5_filename for hdf5_datetime, hdf5_filename in zip(
                hdf5_datetimes, hdf5Filenames)
            if beg_datetime < hdf5_datetime < end_datetime
        ]

        print("Adding %i files between %s and %s to database" %
              (len(matching_hdf5_filenames), args.beg, args.end))
        for fileIndex, hdf5Filename in enumerate(matching_hdf5_filenames):

            hdf5Filepath = get_filepath(hdf5Filename)
            if not silent:
                print("Collecting data: file %i/%i: %s" %
                      (fileIndex, len(matching_hdf5_filenames), hdf5Filename))

            with h5py.File(hdf5Filepath, "r") as hdf5File:
                orbit = hdf5File.attrs["Orbit"]
                filename = hdf5Filename

                diffraction_order = hdf5File["Channel/DiffractionOrder"][0]
                sbsf = hdf5File["Channel/BackgroundSubtraction"][0]
                utc_start_times = hdf5File["Geometry/ObservationDateTime"][:, 0]
                duration = get_obs_duration(hdf5File)
                n_spectra = len(utc_start_times)
                n_orders = hdf5File.attrs["NSubdomains"]
                longitudes = hdf5File["Geometry/Point0/Lon"][:, 0]
                latitudes = hdf5File["Geometry/Point0/Lat"][:, 0]
                if self.command == "lno_nadir":
                    mean_temperature_tgo = np.mean(
                        hdf5File["Temperature/NominalLNO"][...])
                    bin_index = np.ones(n_spectra)
                    incidence_angles = hdf5File[
                        "Geometry/Point0/IncidenceAngle"][:, 0]
                    altitudes = np.zeros(n_spectra) - 999.0
                elif self.command == "so_occultation":
                    mean_temperature_tgo = np.mean(
                        hdf5File["Temperature/NominalSO"][...])
                    bin_index = hdf5File["Channel/IndBin"][...]
                    incidence_angles = np.zeros(n_spectra) - 999.0
                    altitudes = hdf5File["Geometry/Point0/TangentAltAreoid"][:, 0]
                local_times = hdf5File["Geometry/Point0/LST"][:, 0]

                sql_table_rows = []

                #get mean of y radiance factor continuum
                if self.level == "hdf5_level_1p0a":
                    y = hdf5File["Science/YReflectanceFactor"][:, :]
                    for i in range(n_spectra):
                        if incidence_angles[i] < 80.0:
                            continuum = baseline_als(y[i, :])
                            y_mean = np.mean(continuum[160:240])

                            sql_table_rows.append([None, orbit, filename, i, mean_temperature_tgo, \
                               int(diffraction_order), int(sbsf), int(bin_index[i]), utc_start_times[i].decode(), \
                               duration, int(n_spectra), int(n_orders), longitudes[i], latitudes[i], \
                               altitudes[i], incidence_angles[i], local_times[i], float(y_mean)])
                else:

                    for i in range(n_spectra):
                        sql_table_rows.append([None, orbit, filename, i, mean_temperature_tgo, \
                           int(diffraction_order), int(sbsf), int(bin_index[i]), utc_start_times[i].decode(), \
                           duration, int(n_spectra), int(n_orders), longitudes[i], latitudes[i], \
                           altitudes[i], incidence_angles[i], local_times[i]])
                sql_table_rows_datetime = self.convert_table_datetimes(
                    table_fields, sql_table_rows)
                #                self.insert_rows(table_name, table_fields, sql_table_rows_datetime, check_duplicates=False)
                self.insert_rows(table_name,
                                 table_fields,
                                 sql_table_rows_datetime,
                                 check_duplicates=True)
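
process_channel_data expects an argparse-style namespace. A hypothetical driver showing only the attributes the method actually reads (argument names are taken from the code above; defaults, flags and the class wrapper are assumptions):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("command", choices=["lno_nadir", "so_occultation",
                                        "uvis_nadir", "uvis_occultation"])
parser.add_argument("level")                        #e.g. "hdf5_level_1p0a"
parser.add_argument("--beg", required=True)         #start datetime in ARG_FORMAT
parser.add_argument("--end", required=True)         #end datetime in ARG_FORMAT
parser.add_argument("--regex", default="")          #optional filename regex
parser.add_argument("--regenerate", action="store_true")
parser.add_argument("--silent", action="store_true")
args = parser.parse_args()

#db = obs_database(...)   #hypothetical constructor for the class shown above
#db.process_channel_data(args)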
Example #9
y_offset_corrected_mean_1bin = np.mean(y_offset_corrected_all, axis=0)
y_offset_corrected_std = np.std(
    y_offset_corrected_mean_1bin[indices_no_strong_abs])

if 2 in plot_type:
    ax1.plot(x,
             y_offset_corrected_mean_1bin,
             "b",
             label="bin %i after correction" % i)
    ax1.axhline(y=1.0 - 1.0 * y_offset_corrected_std,
                color="b",
                linestyle=":",
                label="stdev after correction")
    ax1.axhline(y=1.0 + y_offset_corrected_std, color="b", linestyle=":")

    y_continuum = baseline_als(y_offset_corrected_mean_1bin)
    y_baseline_corrected_mean_1bin = y_offset_corrected_mean_1bin / y_continuum

    ax2.plot(x,
             y_baseline_corrected_mean_1bin,
             "b",
             label="bin %i after baseline correction" % i)
    ax2.axhline(y=1.0 - 1.0 * y_offset_corrected_std,
                color="b",
                linestyle=":",
                label="stdev after correction")
    ax2.axhline(y=1.0 + y_offset_corrected_std, color="b", linestyle=":")

#    sys.exit()

#plot transmittance vs alt all bins
Example #10
            input("Pausing")

    else:

        #find and remove atmospheric line indices
        valid_xs = []
        for abs_line in lno_curvature_dict[diffraction_order]["clear_nu"]:
            #get pixels not containing absorption line or too close to edges of detector
            valid_xs.extend(np.where((abs_line[0] < x) & (x < abs_line[1]))[0])

        #get error
        err = np.abs(
            y_selected[valid_xs] /
            np.polyval(np.polyfit(pixels[valid_xs], y_selected[valid_xs], 8),
                       pixels[valid_xs]) - 1.0)
        als = baseline_als(err, lam=500.) * 0.66
        y_err = np.polyval(np.polyfit(pixels[valid_xs], als, 9), pixels)
        # y_err[valid_xs[-1]:320] = als[-1] #extrapolate to end of detector
        y_err[y_err < 0.0] = np.mean(err)  #remove negatives

        # plt.figure()
        # # plt.plot(pixels[valid_xs], y_selected[valid_xs])
        # plt.plot(pixels[valid_xs], err, "g")
        # plt.plot(pixels[valid_xs], als, "b")
        # plt.plot(pixels, y_err, "b--")
        # stop()

        x = pixels
        #x relative to centre
        x_mean = np.mean(x)
        x_centre = x - x_mean