# Convert SMF ASD SeaBASS spectra into a single astropy table with Ed and
# R_rs columns, then convert the irradiance columns to SI units.
#
# NOTE(review): the original script used Path, np, table, and u without
# importing pathlib, numpy, or astropy — those imports are added below so
# the script actually runs.
from pathlib import Path

import numpy as np
from astropy import table
from astropy import units as u

from sba.plotting import plot_spectra, map_data
from sba.io import read, write_data, find_auxiliary_information_seabass
from sba.data_processing import remove_negative_R_rs, remove_rows_based_on_threshold, convert_to_unit, add_Lw_from_Ed_Rrs

folder = Path("data/SMF/ASD/")
files = list(folder.glob("*ASD*"))

tabs = []
for file in files:
    # Columns 0, 1, 3 hold wavelength, Ed (labelled Es), and R_rs; the first
    # 50 lines are the SeaBASS header block.
    wavelengths, Es, Rrs = np.loadtxt(file, delimiter="\t", skiprows=50, unpack=True, usecols=[0, 1, 3])
    date, time, lon, lat = find_auxiliary_information_seabass(file)

    # One wide row per file: metadata first, then one Ed_* and one R_rs_*
    # column per wavelength. Note the row stores (lat, lon) in the
    # ("Latitude", "Longitude") column order.
    cols = ["Date", "Time", "Latitude", "Longitude"] + [f"Ed_{wvl:.0f}" for wvl in wavelengths] + [f"R_rs_{wvl:.0f}" for wvl in wavelengths]
    dtype = [int, "S8", float, float] + 2 * [float for wvl in wavelengths]
    tab = table.Table(rows=[[date, time, lat, lon, *Es, *Rrs]], names=cols, dtype=dtype)
    tabs.append(tab)

data = table.vstack(tabs)

# Ed: µW / (cm² nm)  ->  W / (m² nm); R_rs just gets its 1/sr unit attached.
convert_to_unit(data, "Ed", u.microwatt / (u.centimeter**2 * u.nanometer), u.watt / (u.meter**2 * u.nanometer))
convert_to_unit(data, "R_rs", 1 / u.steradian)
# Convert the TAOM SeaBASS file into an astropy table: splice in the real
# column names from the header row, attach position metadata, and convert
# Lw to SI units while labelling the R_rs columns.
from astropy import table
from astropy import units as u
from pathlib import Path
from sba.plotting import plot_spectra, map_data
from sba.io import read, write_data, find_auxiliary_information_seabass
from sba.data_processing import get_keys_with_label, split_spectrum, remove_rows_based_on_threshold

data = read("data/TAOM/ep1_hr3.avg.prod_1_1001.ftp", data_start=30)
# The real column names live on a single header line (rows 27-28 of the file).
header = read("data/TAOM/ep1_hr3.avg.prod_1_1001.ftp", data_start=27, data_end=28)
# First header entry is patched to "year" before the names are applied.
header["col1"][0] = "year"
header = header[0].as_void()

# Rename the generic col1..colN keys to the names taken from the header row.
for key, new_key in zip(data.keys(), header):
    data.rename_column(key, new_key)

date, time, lon, lat = find_auxiliary_information_seabass("data/TAOM/ep1_hr3.avg.prod_1_1001.ftp")
# Single-station file: broadcast the one lat/lon pair to every row.
data.add_column(table.Column(name="Latitude", data=[lat]*len(data)))
data.add_column(table.Column(name="Longitude", data=[lon]*len(data)))

# Normalised water-leaving radiance (Lwn) columns are not used downstream.
# NOTE(review): this removal must happen before the "Lw" lookup below,
# otherwise the Lwn keys would match the "Lw" label too.
data.remove_columns(get_keys_with_label(data, "Lwn"))

Lw_keys, R_rs_keys = get_keys_with_label(data, "Lw", "Rrs")
for Lw_k, R_rs_k in zip(Lw_keys, R_rs_keys):
    # Keys look like "Lw<wavelength>"; strip the 2-char prefix to get the
    # wavelength in nm — presumably numeric; verify against the file header.
    wavelength = float(Lw_k[2:])
    # Lw: µW / (cm² nm sr)  ->  W / (m² nm sr), then rename with a
    # zero-padded 4-decimal wavelength suffix.
    data[Lw_k].unit = u.microwatt / (u.cm**2 * u.nm * u.steradian)
    data[Lw_k] = data[Lw_k].to(u.watt / (u.m**2 * u.nm * u.steradian))
    data.rename_column(Lw_k, f"Lw_{wavelength:.4f}")

    # R_rs is stored in 1/sr already; only the unit label is attached.
    data[R_rs_k].unit = 1 / u.steradian
    # NOTE(review): this fragment starts mid-loop — time_table, time_start,
    # data_table, row, j, data_files, data, and rename_columns are defined
    # outside the visible span. The loop presumably iterates station rows
    # (see print(j, row["Station"]) below) — confirm against the full file.

    # Keep only measurements within 3 (hours? — units not visible here,
    # TODO confirm) of the station's start time.
    remove_indices = np.where((time_table < time_start) | (time_table > time_start + 3))[0]
    data_table.remove_rows(remove_indices)

    # If no data are available, go to the next file
    if len(data_table) == 0:
        continue

    # Calculate median values and replace table with only these.
    # (Despite the "means" name, these are medians of every data column
    # past the first three metadata columns.)
    means = [np.median(data_table[key]) for key in data_table.keys()[3:]]
    mean_data = [*data_table[0]["year", "jd"], row["time_GMT"], *means]
    # Append the aggregate row, then drop all original rows so only the
    # single median row remains.
    data_table.add_row(mean_data)
    data_table.remove_rows(np.arange(len(data_table) - 1))

    # Finally, load lat/lon from the first file's SeaBASS header.
    *_, lon, lat = find_auxiliary_information_seabass(data_files[0])
    data_table.add_column(table.Column(name="Longitude", data=[lon]))
    data_table.add_column(table.Column(name="Latitude", data=[lat]))

    data.append(data_table)
    print(j, row["Station"])

# Stack the per-station single-row tables into one table and normalise
# column names/units.
data = table.vstack(data)

rename_columns("Es", "Ed_")
rename_columns("Lsky", "Ls_")
rename_columns("Lt", "Lt_")

# Ed: µW / (cm² nm)  ->  W / (m² nm).
convert_to_unit(data, "Ed", u.microwatt / (u.centimeter**2 * u.nanometer), u.watt / (u.meter**2 * u.nanometer))