def make_non_virtual_temp_plots():
    input_dir = path_util.get_project_root() / "calculations" / "calc_from_intermediate"
    levels = [2]
    data = dict()
    print("Loading data")
    for level in levels:
        level_input_dir = input_dir / str(level)
        data[level] = pd.read_csv(level_input_dir / "results-temp.csv", sep="\t", parse_dates=["time"])
    output_dir = input_dir / "plots"
    output_dir.mkdir(parents=True, exist_ok=True)

    # kinematic_temp_flux
    print("Kinematic temp flux")
    args = PlotVariables(column="kinematic_temp_flux",
                         plot_title="Kinematic Temperature Flux (cov T, w)",
                         y_label="Flux (K*m/s)",
                         output_path=output_dir / "temp_flux.png")
    plot_variable(data, args, only_surface=True)

    # H_s
    print("H_s")
    args = PlotVariables(column="H_s",
                         plot_title="Sensible Heat Flux (H_s)",
                         y_label="Flux (W/m^2)",
                         output_path=output_dir / "sensible_heat_flux.png")
    plot_variable(data, args, only_surface=True)
def load_processed_sonic_data(level, directory_override=None):
    """
    Loads the sonic anemometer data (from an intermediate dataset you need to download!).

    Column names are: "time", "pressure", "rho_v", "u", "v", "w", "virtual_temp",
    "temp", "potential_temp"

    :param level: 2, 5, or 10 - the height (in meters) of the dataset
    :param directory_override: path to test data / a non-default data path, if applicable
    :return: pandas.DataFrame
    """
    if directory_override is True:
        directory_override = get_project_root() / "data/2021 Final Project Data/SonicData/select_fields/1hr"
    directory = _get_data_root_dir() / directory_override if directory_override \
        else _get_sonic_data_dir() / "select_fields"
    file = directory / ("sonic_data_" + str(level))
    try:
        df = pd.read_csv(file, sep="\t", parse_dates=["time"], index_col=0)
        # Correct the timestamp offset present in the raw files.
        df["time"] = df["time"] - datetime.timedelta(days=366)
        # df = df[(df["time"] >= pd.Timestamp('2018-09-13 21:00:00')) &
        #         (df["time"] <= pd.Timestamp('2018-09-14 06:00:00'))]
        return df
    except FileNotFoundError as e:
        _handle_file_not_found(e, "select_fields")
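A minimal usage sketch (illustrative, not part of the loader module): assuming the "select_fields" dataset has been downloaded to its default location, this loads the 2 m level and inspects a few of the columns named in the docstring.

    df = load_processed_sonic_data(2)
    print(df[["time", "u", "v", "w", "virtual_temp", "temp", "potential_temp"]].head())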
def __init__(self):
    with open(get_project_root() / "calculations" / "calculations_config.yaml") as config_file:
        config = yaml.load(config_file, Loader=yaml.FullLoader)
    self.config = config
    dirs = [self.get_figure_output_dir(), self.get_data_output_dir()]
    for directory in dirs:
        directory.mkdir(parents=True, exist_ok=True)
class TestTimeFormatter(unittest.TestCase):
    test_dir = get_project_root() / "test" / "test_data"

    def test_get_time_parsing_args_for_visibility_data(self):
        kwargs = time_format.get_time_parsing_args_for_visibility_data()
        test_file = self.test_dir / "mock_vis_time_data.csv"
        df = pd.read_csv(test_file, sep="\t", **kwargs)
        expected = datetime.datetime(2018, 9, 13, second=9)
        self.assertEqual(expected, df.iloc[0]["time"])
def main(test=True):
    intermediate_dir = path_util.get_project_root() / "data" / "intermediate"
    output_dir = path_util.get_project_root() / "calculations" / "calc_from_intermediate" / "dissipation"
    if test:
        intermediate_dir = intermediate_dir / "test"
        output_dir = output_dir / "test"
    levels = [2, 5, 10]
    datasets = dict()
    for level in levels:
        input_dir = intermediate_dir / str(level)
        datasets[level] = load_datasets(input_dir)
    dissipation_rates = dict()
    etas = dict()
    for level in levels:
        dissipation_rates[level] = calculate_dissipation_rate(datasets, level)
        etas[level] = calculate_kolmogorov_length(dissipation_rates[level])
    output_dir.mkdir(parents=True, exist_ok=True)
    plot_dissipation_rate(dissipation_rates, output_dir)
    plot_eta(etas, output_dir)
def main(test=True):
    intermediate_dir = path_util.get_project_root() / "data" / "intermediate"
    output_dir = path_util.get_project_root() / "calculations" / "calc_from_intermediate"
    if test:
        intermediate_dir = intermediate_dir / "test"
        output_dir = output_dir / "test"
    tke_output_dir = output_dir / "tke_10"
    levels = [2, 5, 10]
    tke_10_min_sets = dict()
    for level in levels:
        input_dir = intermediate_dir / str(level)
        datasets = load_datasets(input_dir)
        if level == 2:
            surface_data = datasets
        # run_calculations(level, datasets, surface_data, output_dir / str(level), test)
        tke_10_min_sets[level] = calculate_tke_10_min(datasets, tke_output_dir / str(level))
    plot_tke_10_min(tke_10_min_sets, tke_output_dir)
def get_sonic_data(level, test):
    # Return cached data if this level has already been loaded.
    if level in sonic_data_cached:
        return sonic_data_cached[level]
    test_dir = None
    if test:
        test_dir = path_util.get_project_root() / "data/2021 Final Project Data/SonicData/select_fields/1hr"
    print("Loading sonic data")
    sonic_data = data_loader.load_processed_sonic_data(level, directory_override=test_dir)
    print("... Loaded")
    sonic_data_cached[level] = sonic_data
    return sonic_data
def main(test=True):
    root_output_dir = path_util.get_project_root() / "data" / "intermediate"
    override = None
    if test:
        root_output_dir = root_output_dir / "test"
        override = _get_data_root_dir() / "SonicData/select_fields/1hr"
    levels = [2, 5, 10]
    for level in levels:
        output_dir = root_output_dir / str(level)
        output_dir.mkdir(parents=True, exist_ok=True)
        sonic_data = load_processed_sonic_data(level, override)

        # Change which methods to call
        calculate_w_prime(sonic_data, output_dir / "w_prime.csv")
        calculate_u_prime(sonic_data, output_dir / "u_prime.csv")
        calculate_T_prime(sonic_data, output_dir / "T_prime.csv")
        calculate_v_prime(sonic_data, output_dir / "v_prime.csv")
        calculate_T_s_prime(sonic_data, output_dir / "T_s_prime.csv")
def plot_autocorrelation(T, U, V, W):
    print("starting autocorrelations")
    auto_corr_lag_U = calculate_autocorrelation(U)
    print("finished u")
    auto_corr_lag_V = calculate_autocorrelation(V)
    print("finished v")
    auto_corr_lag_W = calculate_autocorrelation(W)
    print("finished w")
    auto_corr_lag_T = calculate_autocorrelation(T)
    print("finished T")

    dt = 1 / 20  # 20 Hz
    time = np.arange(1, int(len(U) * dt))

    plt.plot(time, auto_corr_lag_U, label="U velocity")
    plt.plot(time, auto_corr_lag_V, label="V velocity")
    plt.plot(time, auto_corr_lag_W, label="W velocity")
    plt.plot(time, auto_corr_lag_T, label="Virtual Temperature")
    plt.ylabel("Autocorrelation")
    plt.xlabel("Lag (seconds)")
    plt.legend()
    plt.grid()
    plt.title("Autocorrelation vs. Lag")
    plt.savefig(path_util.get_project_root() /
                "AutocorrelationLag-{}.png".format(datetime.datetime.now()))
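The calculate_autocorrelation helper is defined elsewhere in the project and is not shown here. Purely as a sketch of one possible implementation, assuming it returns the normalized autocorrelation of the 20 Hz series at whole-second lags (so its length matches the `time` axis built above) and that numpy is already imported as np, it might look like the following; the project's actual helper may differ.

    def calculate_autocorrelation(series, sampling_hz=20):
        # Normalized autocorrelation at whole-second lags (1 s, 2 s, ...).
        x = np.asarray(series, dtype=float)
        x = x - x.mean()
        var = x.var()
        n_seconds = int(len(x) / sampling_hz)
        lags = np.arange(1, n_seconds) * sampling_hz  # lag expressed in samples
        return np.array([np.mean(x[:-lag] * x[lag:]) / var for lag in lags])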
def _get_data_root_dir():
    return get_project_root() / "data" / "2021 Final Project Data"
def get_output_root_dir(self):
    return get_project_root() / self.config["output_dirs"]["root"]
def main(test=True):
    input_dir = path_util.get_project_root() / "calculations" / "calc_from_intermediate"
    if test:
        input_dir = input_dir / "test"
    levels = [2, 5, 10]
    data = dict()
    print("Loading data")
    for level in levels:
        level_input_dir = input_dir / str(level)
        data[level] = pd.read_csv(level_input_dir / "results.csv", sep="\t", parse_dates=["time"])
    output_dir = input_dir / "plots"
    output_dir.mkdir(parents=True, exist_ok=True)

    print("Making plots")

    # kinematic_temp_flux
    print("Kinematic temp flux")
    args = PlotVariables(column="kinematic_temp_flux",
                         plot_title="Kinematic Temperature Flux (cov T_S, w)",
                         y_label="Flux (K*m/s)",
                         output_path=output_dir / "temp_flux.png")
    plot_variable(data, args, only_surface=True)

    # friction_velocity
    print("Friction velocity")
    args = PlotVariables(column="friction_velocity",
                         plot_title="Friction Velocity (u*)",
                         y_label="Friction Velocity (m/s)",
                         output_path=output_dir / "friction_velocity.png")
    plot_variable(data, args)

    # H_s
    print("H_s")
    args = PlotVariables(column="H_s",
                         plot_title="Sensible Heat Flux (H_s)",
                         y_label="Flux (W/m^2)",
                         output_path=output_dir / "sensible_heat_flux.png")
    plot_variable(data, args, only_surface=True)

    # tke
    print("tke")
    args = PlotVariables(column="tke",
                         plot_title="Turbulent Kinetic Energy (tke)",
                         y_label="tke (m^2/s^2)",
                         output_path=output_dir / "tke.png")
    plot_variable(data, args)

    # w*
    print("w*")
    args = PlotVariables(column="w_star",
                         plot_title="Deardorff/Convective Velocity (w*)",
                         y_label="w* (m/s)",
                         output_path=output_dir / "w_star.png")
    plot_variable(data, args)

    # L
    print("L")
    args = PlotVariables(column="L",
                         plot_title="Monin-Obukhov Length (L)",
                         y_label="L (m)",
                         output_path=output_dir / "obukhov_length.png")
    plot_variable(data, args)
import datetime

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

from utils import path_util

# file1 = r'C:\Users\makom\source\repos\EFD_Final_Project\EFD_Final_Project\select_fields\sonic_data_2'
# file2 = r'C:\Users\makom\source\repos\EFD_Final_Project\EFD_Final_Project\select_fields\sonic_data_5'
# file3 = r'C:\Users\makom\source\repos\EFD_Final_Project\EFD_Final_Project\select_fields\sonic_data_10'

file_dir = path_util.get_project_root() / "data" / "2021 Final Project Data" / "SonicData" / "select_fields"
file1 = file_dir / "sonic_data_2"
file2 = file_dir / "sonic_data_5"
file3 = file_dir / "sonic_data_10"


def load_data(file):
    return pd.read_csv(file, sep="\t", index_col=0, parse_dates=["time"])


sonic2 = load_data(file1)
sonic5 = load_data(file2)
sonic10 = load_data(file3)


def correct_time(data):
    data["time"] = data["time"] - datetime.timedelta(days=366)
    # data = data[(data["time"] >= pd.Timestamp('2018-09-13 21:00:00')) &
    #             (data["time"] <= pd.Timestamp('2018-09-14 06:00:00'))]
    return data
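For illustration only (these calls are not in the original script): after loading, each frame would presumably be passed through correct_time before any further analysis, e.g.

    sonic2 = correct_time(sonic2)
    sonic5 = correct_time(sonic5)
    sonic10 = correct_time(sonic10)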