def main():
    wandb.init(project="msc_thesis_hendrig")

    ap = argparse.ArgumentParser()
    ap.add_argument("data_path", help="Path to data root")
    ap.add_argument("vocabulary_path", help="Path to vocabulary file")
    ap.add_argument("config", help="Path to config file")
    ap.add_argument("-m", "--models", help="Directory to store trained models (optional)")
    ap.add_argument("-l", "--log", help="Path to store training log (optional)")
    ap.add_argument("-e", "--eval_only", action="store_true",
                    help="Run only the final model evaluation")
    args = ap.parse_args()

    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)
    wandb.config.update(args)
    wandb.config.update(config)

    # Pin training to the GPU currently using the least memory, if one is available.
    lowest_memory_gpu = gpu_selector.GPUSelector().pick_gpu_lowest_memory()
    if lowest_memory_gpu is not None:
        os.environ["CUDA_VISIBLE_DEVICES"] = str(lowest_memory_gpu)

    print("Training with configuration:", config)
    data = data_loader.DataLoader(args.data_path, config["data"],
                                  vocabulary.Vocabulary(args.vocabulary_path))

    if args.eval_only:
        if args.models is None or args.log is None:
            raise ValueError(
                "Must provide paths to pre-trained models and a log file "
                "when running the final evaluation")
        test(data, config, args.models, args.log)
    else:
        train(data, config, args.models, args.log)
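# The gpu_selector module used above is not part of this listing. The function
# below is a minimal sketch of what GPUSelector().pick_gpu_lowest_memory()
# presumably does, assuming nvidia-smi is on the PATH; it is an illustration,
# not the project's actual implementation.
import subprocess

def pick_gpu_lowest_memory():
    """Return the index of the GPU with the least memory currently in use,
    or None if no GPU can be queried (illustrative stand-in only)."""
    try:
        output = subprocess.check_output(
            ["nvidia-smi", "--query-gpu=memory.used",
             "--format=csv,noheader,nounits"],
            encoding="utf-8")
    except (OSError, subprocess.CalledProcessError):
        return None
    used = [int(line) for line in output.strip().splitlines() if line.strip()]
    if not used:
        return None
    # Index of the GPU with the smallest amount of memory in use.
    return min(range(len(used)), key=used.__getitem__)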
def main():
    ap = argparse.ArgumentParser()
    ap.add_argument("data_path", help="Path to data root")
    ap.add_argument("vocabulary_path", help="Path to vocabulary file")
    ap.add_argument("config", help="Path to config file")
    ap.add_argument("-m", "--models", help="Directory to store trained models (optional)")
    ap.add_argument("-l", "--log", help="Path to store training log (optional)")
    ap.add_argument("-e", "--eval_only", action="store_true",
                    help="Run only the final model evaluation")
    args = ap.parse_args()

    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)

    print("Training with configuration:", config)
    data = data_loader.DataLoader(args.data_path, config["data"],
                                  vocabulary.Vocabulary(args.vocabulary_path))

    if args.eval_only:
        if args.models is None or args.log is None:
            raise ValueError(
                "Must provide paths to pre-trained models and a log file "
                "when running the final evaluation")
        test(data, config, args.models, args.log)
    else:
        train(data, config, args.models, args.log)
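# Only config["data"] is dereferenced inside main() itself; the full dictionary
# is passed on to train() and test(). The dict below sketches the shape that
# yaml.safe_load(open(args.config)) might return. Every nested key shown here
# is a hypothetical placeholder for illustration, not the project's real schema.
example_config = {
    "data": {
        "batch_size": 32,             # assumed
        "max_sequence_length": 128,   # assumed
    },
    "training": {                     # consumed by train()/test(); contents unknown
        "epochs": 10,                 # assumed
        "learning_rate": 1e-4,        # assumed
    },
}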
def __init__(self, action, debug_flag=False):
    self.debug_flag = debug_flag
    self.base_directory = os.path.dirname(__file__)

    # Conditions for loading the context: either GUI or TRAIN.
    print('Starting the Face Recognition Module Context Creation for', action)

    print('Creating Configuration Module in Context')
    # Load the configuration shared by the UI and training.
    self.configuration = config.Configuration(self.base_directory, debug_flag)
    self.configuration.load_configuration(
        os.path.join(os.path.dirname(__file__), "configuration/application/app.config"))

    print('Creating Tensorflow Session in Context')
    # TensorFlow session for training (currently disabled).
    # self.config = tf.ConfigProto()
    # self.config.gpu_options.allow_growth = False
    # self.sess = tf.InteractiveSession(config=self.config)

    print('Creating Data Module in Context')
    # Data module: handles reading, writing and separating data.
    self.data_loader = data_loader.DataLoader(self.base_directory, self.configuration)

    print('Creating Model Module in Context')
    # Model module: stores and restores models at training or testing time.
    self.model_loader = model.Model(self.base_directory, self.configuration)

    print('Creating Information Module in Context')
    # Info module: reports information about the module.
    self.info = info.Information()

    print('Creating Pre Processing Module in Context')
    # Preprocessing module.
    self.preprocess = preprocess.Preprocessing(self.configuration)
    self.resultwritter = rs.ResultWriter(self.base_directory, self.configuration)

    if action == "TRAIN":
        print('Creating Data Augmentation Module in Context')
        # Data augmentation module.
        self.augment = augment1.Augmentation(self.configuration)

        print('Creating Data Separation Module in Context')
        self.data_separation = dataseparation.DataSeparation(self.configuration)

        print('Creating Model Preparation Module in Context')
        self.model_preparation = nnmodel.NNModel(self.base_directory, self.configuration)

        print('Creating Batch Module in Context')
        self.batch_prepare = pb.Prepare_Batch()

        print('Creating Evaluation Module in Context')

    # Load the detection and recognition modules for the GUI.
    if action == "GUI":
        print('Creating Face Detection Module in Context')
        self.detect = self.face_detector()

        print('Creating Face Recognition Module in Context')
        self.recognize = self.face_recognizer(self.configuration.recognizer_type)

        print('Creating Post Processing Module in Context')
        # Post-processing module.
        self.postprocess = postprocess.Postprocessing(self.data_loader)
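# The enclosing class definition is not shown in this listing. Assuming the
# constructor above belongs to a class named FaceRecognitionContext (the name
# is an assumption), a caller would build the training or GUI context roughly
# as follows.
train_ctx = FaceRecognitionContext(action="TRAIN", debug_flag=True)
augmenter = train_ctx.augment            # created only on the TRAIN path
batches = train_ctx.batch_prepare        # created only on the TRAIN path

gui_ctx = FaceRecognitionContext(action="GUI")
detector = gui_ctx.detect                # created only on the GUI path
recognizer = gui_ctx.recognize           # created only on the GUI path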