)

# NETWORK SETTINGS
# Data
loaders = myutils.get_module(args.net_dir, 'loaders')
dataloaders, dataset_sizes = loaders.get_loaders(
    dfs, mean, std, size=params.size,
    batch_size=params.batch_size, num_workers=params.num_workers)

# Net
net = myutils.get_network(args.net_dir, params.network)
optimizer = myutils.get_optimizer(
    params.optimizer, net, params.learning_rate,
    params.momentum, params.weight_decay)

# Schedulers
# vgg: ReduceLROnPlateau with mode='max' expects scheduler.step(val_metric)
# once per epoch, with a metric that should increase (e.g. accuracy).
if params.network.startswith('vgg'):
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
        optimizer, factor=0.1, mode='max')
# efficientnetb: decaying by 0.97 every 2.4 epochs follows the EfficientNet
# recipe, but StepLR expects an integer step_size; an integer epoch count
# never lands exactly on a multiple of the float 2.4, so this decay may
# never fire in practice.
if params.network.startswith('efficientnetb'):
    scheduler = torch.optim.lr_scheduler.StepLR(
        optimizer, step_size=2.4, gamma=0.97)
# resnet: this call was truncated in the original; the remaining arguments
# are assumed to mirror the vgg branch.
if params.network.startswith('resnet'):
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
        optimizer, factor=0.1, mode='max')
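# myutils.get_module is not shown in this excerpt. A minimal sketch of what it
# might look like, assuming it dynamically imports `<net_dir>/<name>.py` via
# importlib (a hypothetical reconstruction, not the actual helper):
#
#     import importlib.util
#     import os
#
#     def get_module(net_dir, name):
#         """Load `<net_dir>/<name>.py` as a module and return it."""
#         path = os.path.join(net_dir, f'{name}.py')
#         spec = importlib.util.spec_from_file_location(name, path)
#         module = importlib.util.module_from_spec(spec)
#         spec.loader.exec_module(module)
#         return module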
fname = os.path.join(args.data_dir, f'val{fold}.csv')
val = pd.read_csv(fname)
dfs['train'] = train
dfs['val'] = val

# NETWORK SETTINGS
# Data
# (was myutils.get_loaders; get_module matches the helper used above, and the
# module it returns is what exposes get_loaders)
loaders = myutils.get_module(args.net_dir, 'loaders')
dataloaders, dataset_sizes = loaders.get_loaders(
    dfs, size=params.size,
    batch_size=params.batch_size, num_workers=params.num_workers)

# Net
net = myutils.get_network(args.net_dir, params.network)
optimizer = myutils.get_optimizer(params.optimizer, net, params.learning_rate)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=7, gamma=0.1)

# Loss function
weight = myutils.get_weight(train)
criterion = myutils.get_loss_fn(args.net_dir, params.network, weight)
logging_process.info(
    f'Model: {args.model_dir}\tFile: train{fold}.csv\tWeight: {weight}')

# Train
print(f'Fold {fold}')
print('-' * 10)
logging_process.info(
    f'Model: {args.model_dir}\tFold: {fold}, '
    f'training has started for {params.num_epochs} epochs')
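# myutils.get_weight is likewise not shown. A minimal sketch of an
# inverse-frequency class weighting, assuming the training dataframe has a
# categorical 'label' column (the column name is a guess) and the result feeds
# a weighted loss such as nn.CrossEntropyLoss(weight=...):
#
#     import torch
#
#     def get_weight(train_df, label_col='label'):
#         """Weight each class by total_samples / (n_classes * class_count)."""
#         counts = train_df[label_col].value_counts().sort_index()
#         weight = counts.sum() / (len(counts) * counts)
#         return torch.tensor(weight.to_numpy(), dtype=torch.float)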