def create_trainer_and_start(self, out_path, epochs=1, load_test_set=False):
    """Build a ModelTrainer wired to the debug flow-front sensor data.

    NOTE(review): despite its name, this method only constructs and returns
    the trainer; the caller is responsible for starting the run.

    Args:
        out_path: Directory where the trainer saves its outputs.
        epochs: Number of epochs to configure the trainer with (default 1).
        load_test_set: Forwarded to ``load_test_set_in_training_mode``
            (default False).

    Returns:
        A configured ModelTrainer instance (not yet started).
    """
    data_loader = DataloaderFlowfrontSensor(sensor_indizes=((1, 8), (1, 8)))
    trainer = ModelTrainer(
        lambda: S20DryspotModelFCWide(),
        data_source_paths=tr_resources.get_data_paths_debug(),
        save_path=out_path,
        load_datasets_path=self.torch_dataset_resources / "reference_datasets",
        cache_path=None,
        num_validation_samples=8,
        num_test_samples=8,
        num_workers=0,
        epochs=epochs,
        data_processing_function=data_loader.get_flowfront_sensor_bool_dryspot,
        data_gather_function=dg.get_filelist_within_folder_blacklisted,
        loss_criterion=torch.nn.BCELoss(),
        optimizer_function=lambda params: torch.optim.AdamW(params, lr=1e-4),
        classification_evaluator_function=lambda summary_writer: BinaryClassificationEvaluator(
            summary_writer=summary_writer
        ),
        load_test_set_in_training_mode=load_test_set,
        data_root=test_resources.test_src_dir,
    )
    return trainer
import pickle
from pathlib import Path

import numpy as np

import Resources.training as r
from Pipeline import torch_datagenerator as td
from Pipeline.data_gather import get_filelist_within_folder_blacklisted
from Pipeline.data_loader_flowfront_sensor import DataloaderFlowfrontSensor

if __name__ == "__main__":
    # Stream the debug dataset batch by batch and accumulate per-sensor
    # mean / std of the absolute flow speed at the sensor positions.
    dlds = DataloaderFlowfrontSensor(sensor_indizes=((1, 8), (1, 8)))
    generator = td.LoopingDataGenerator(
        r.get_data_paths_debug(),
        get_filelist_within_folder_blacklisted,
        dlds.get_flowfront_sensor_bool_dryspot,
        num_validation_samples=131072,
        num_test_samples=1048576,
        batch_size=131072,
        split_load_path=r.dataset_split,
        split_save_path=Path(),
        num_workers=75,
        looping_strategy=None,
    )
    mean = 0.
    std = 0.
    j = 0
    for i, (inputs, _, _) in enumerate(generator):
        # Vector norm over the component axis -> absolute speed per sensor.
        abs_speed_at_sensors = np.linalg.norm(inputs, axis=2)
        mean += abs_speed_at_sensors.mean(axis=0)
        std += abs_speed_at_sensors.std(axis=0)
if __name__ == "__main__": args = read_cmd_params_attention() if "swt-dgx" in socket.gethostname(): batch_size = 32 dataset_paths = r.get_regular_sampled_data_paths() num_workers = 35 num_val = 100 num_test = 400 data_root = r.data_root_every_step img_save_path = Path( "/cfs/home/s/c/schroeni/Images/FlowFrontToFiber/TransPretrained") chpkt = r"/cfs/share/cache/output_schroeni/2021-02-17_14-54-44/checkpoint.pth" else: batch_size = 8 dataset_paths = r.get_data_paths_debug() num_workers = 20 num_val = 2 num_test = 1 data_root = r.data_root img_save_path = Path(r"C:\Users\schroeni\CACHE\Saved_Imgs\FFtoPerm") chpkt = r"X:\cache\output_schroeni\2021-02-17_14-54-44\checkpoint.pth" dl = DataloaderImageSequences() m = ModelTrainer( lambda: OptimusPrime_c2D(batch_size), dataset_paths, r.save_path, cache_path=r.cache_path, batch_size=batch_size, epochs=150,
import Resources.training as r from Models.erfh5_fullyConnected import S1140DryspotModelFCWide from Pipeline.data_gather import get_filelist_within_folder_blacklisted from Pipeline.data_loader_flowfront_sensor import DataloaderFlowfrontSensor from Trainer.ModelTrainer import ModelTrainer from Trainer.evaluation import BinaryClassificationEvaluator from Utils.training_utils import read_cmd_params if __name__ == "__main__": args = read_cmd_params() dlds = DataloaderFlowfrontSensor(sensor_indizes=((0, 1), (0, 1)), frame_count=1, use_binary_sensor_only=True) m = ModelTrainer(lambda: S1140DryspotModelFCWide(), data_source_paths=r.get_data_paths_debug(), save_path=r.save_path, dataset_split_path=r.dataset_split, cache_path=r.cache_path, batch_size=2048, train_print_frequency=100, epochs=100, num_workers=75, num_validation_samples=512, # 131072, num_test_samples=1024, # 1048576, data_processing_function=dlds.get_flowfront_sensor_bool_dryspot, data_gather_function=get_filelist_within_folder_blacklisted, loss_criterion=torch.nn.BCELoss(), optimizer_function=lambda params: torch.optim.AdamW(params, lr=1e-4), classification_evaluator_function=lambda: BinaryClassificationEvaluator(), dont_care_num_samples=True
import pickle
from pathlib import Path

import numpy as np

import Resources.training as r
from Pipeline import torch_datagenerator as td
from Pipeline.data_gather import get_filelist_within_folder_blacklisted
from Pipeline.data_loader_flowfront_sensor import DataloaderFlowfrontSensor

if __name__ == "__main__":
    # Iterate the dry-spot dataset split and accumulate the per-sensor mean
    # of the absolute flow speed across batches.
    dlds = DataloaderFlowfrontSensor(sensor_indizes=((1, 8), (1, 8)))
    generator = td.LoopingDataGenerator(
        r.get_data_paths_debug(),
        get_filelist_within_folder_blacklisted,
        dlds.get_flowfront_sensor_bool_dryspot,
        num_validation_samples=131072,
        num_test_samples=1048576,
        batch_size=131072,
        split_load_path=r.datasets_dryspots,
        split_save_path=Path(),
        num_workers=75,
        looping_strategy=None,
    )
    mean = 0.
    std = 0.
    j = 0
    for i, (inputs, _, _) in enumerate(generator):
        # Vector norm over the component axis -> absolute speed per sensor.
        abs_speed_at_sensors = np.linalg.norm(inputs, axis=2)
        mean += abs_speed_at_sensors.mean(axis=0)