def run_experiment(parser, use_gpu):
    """Parse experiment CLI options, pre-process data, and train ExampleModel.

    Args:
        parser: an argparse.ArgumentParser. The caller is expected to have
            already registered --minibatch-size, --eval-interval, --hide-ui
            and --minimum-updates (they are read from the parsed args below).
        use_gpu: bool — forwarded to pre-processing and the model.
    """
    # Parse experiment-specific command line arguments.
    parser.add_argument('--learning-rate',
                        dest='learning_rate',
                        type=float,
                        default=0.01,
                        help='Learning rate to use during training.')
    # parse_known_args so unrelated flags from the launcher don't abort.
    args, _unknown = parser.parse_known_args()

    # Pre-process data; don't overwrite previously generated files.
    process_raw_data(use_gpu, force_pre_processing_overwrite=False)

    # Run experiment — the sample file doubles as train and validation set.
    training_file = "data/preprocessed/sample.txt.hdf5"
    validation_file = "data/preprocessed/sample.txt.hdf5"

    model = ExampleModel(21, args.minibatch_size, use_gpu=use_gpu)  # embed size = 21

    train_loader = contruct_dataloader_from_disk(training_file,
                                                 args.minibatch_size)
    validation_loader = contruct_dataloader_from_disk(validation_file,
                                                      args.minibatch_size)

    train_model_path = train_model(data_set_identifier="TRAIN",
                                   model=model,
                                   train_loader=train_loader,
                                   validation_loader=validation_loader,
                                   learning_rate=args.learning_rate,
                                   minibatch_size=args.minibatch_size,
                                   eval_interval=args.eval_interval,
                                   hide_ui=args.hide_ui,
                                   use_gpu=use_gpu,
                                   minimum_updates=args.minimum_updates)

    print("Completed training, trained model stored at:")
    print(train_model_path)
def run_experiment(use_gpu=False, minibatch_size=5, learning_rate=0.1,
                   eval_interval=5, minimum_updates=1, hide_ui=True):
    """Pre-process the bundled sample data and run one training experiment.

    All parameters default to the values that were previously hard-coded,
    so a zero-argument call behaves exactly as before; callers may now
    override any hyperparameter.

    Args:
        use_gpu: bool — forwarded to pre-processing and the model.
        minibatch_size: batch size for model, train and validation loaders.
        learning_rate: optimizer learning rate passed to train_model.
        eval_interval: evaluation interval passed to train_model.
        minimum_updates: minimum update count (used here as epoch budget).
        hide_ui: suppress the training UI.
    """
    # Pre-process data; don't overwrite previously generated files.
    process_raw_data(use_gpu, force_pre_processing_overwrite=False)

    # Run experiment — the sample file doubles as train and validation set.
    training_file = settings.BASE_DIR + "/protAPI/proteinnet/data/preprocessed/sample.txt.hdf5"
    validation_file = settings.BASE_DIR + "/protAPI/proteinnet/data/preprocessed/sample.txt.hdf5"

    model = MyModel(21, minibatch_size, use_gpu=use_gpu)  # embed size = 21

    train_loader = contruct_dataloader_from_disk(training_file, minibatch_size)
    validation_loader = contruct_dataloader_from_disk(validation_file, minibatch_size)

    train_model_path = train_model(data_set_identifier="TRAINXX",
                                   model=model,
                                   train_loader=train_loader,
                                   validation_loader=validation_loader,
                                   learning_rate=learning_rate,
                                   minibatch_size=minibatch_size,
                                   eval_interval=eval_interval,
                                   hide_ui=hide_ui,
                                   use_gpu=use_gpu,
                                   minimum_updates=minimum_updates)  # Epochs

    print("Completed training, trained model stored at:")
    print(train_model_path)
def run_experiment(parser, use_gpu):
    """Register experiment flags, pre-process data, and train MyModel.

    Args:
        parser: argparse.ArgumentParser already carrying the shared options
            (minibatch_size, eval_interval, hide_ui, minimum_updates).
        use_gpu: bool — forwarded to pre-processing and the model.
    """
    # Experiment-specific command line arguments.
    parser.add_argument('--learning-rate',
                        dest='learning_rate',
                        type=float,
                        default=0.01,
                        help='Learning rate to use during training.')
    parser.add_argument('--input-file',
                        dest='input_file',
                        type=str,
                        default='data/preprocessed/protein_net_testfile.txt.hdf5')
    args, _ = parser.parse_known_args()

    # Pre-process data without clobbering earlier runs.
    process_raw_data(use_gpu, force_pre_processing_overwrite=False)

    # The same input file serves both as training and validation data.
    data_path = args.input_file
    batch = args.minibatch_size

    model = MyModel(21, batch, use_gpu=use_gpu)  # embed size = 21
    train_loader = contruct_dataloader_from_disk(data_path, batch)
    validation_loader = contruct_dataloader_from_disk(data_path, batch)

    train_model_path = train_model(data_set_identifier="TRAIN",
                                   model=model,
                                   train_loader=train_loader,
                                   validation_loader=validation_loader,
                                   learning_rate=args.learning_rate,
                                   minibatch_size=batch,
                                   eval_interval=args.eval_interval,
                                   hide_ui=args.hide_ui,
                                   use_gpu=use_gpu,
                                   minimum_updates=args.minimum_updates)

    print("Completed training, trained model stored at:")
    print(train_model_path)
def run_training(model_name, epochs, author, desc=""):
    """Train a dynamically loaded custom model and record/report the result.

    Args:
        model_name: class name inside protAPI.proteinnet.custom_models
            (spaces are stripped before lookup).
        epochs: number of minimum updates; coerced to int since callers may
            pass it as a string (e.g. from an HTTP request).
        author: stored on the resulting ModelTrained row.
        desc: optional free-text description.
    """
    epochs = int(epochs)  # sibling run_training does this too; guards string input

    # Pre-process data; don't overwrite previously generated files.
    process_raw_data(False, force_pre_processing_overwrite=False)
    model_name = model_name.replace(' ', '')

    # Run experiment — the sample file doubles as train and validation set.
    training_file = settings.BASE_DIR + "/protAPI/proteinnet/data/preprocessed/sample.txt.hdf5"
    validation_file = settings.BASE_DIR + "/protAPI/proteinnet/data/preprocessed/sample.txt.hdf5"

    # Resolve the model class by name from the custom_models module.
    dynamic_model = getattr(
        importlib.import_module("protAPI.proteinnet.custom_models"),
        model_name)

    model = dynamic_model(21, use_gpu=False)  # embed size = 21
    train_loader = contruct_dataloader_from_disk(training_file, 5)
    validation_loader = contruct_dataloader_from_disk(validation_file, 5)

    train_model_path = train_model(data_set_identifier="TRAINXX",
                                   model=model,
                                   train_loader=train_loader,
                                   validation_loader=validation_loader,
                                   learning_rate=0.1,
                                   minibatch_size=5,
                                   eval_interval=5,
                                   hide_ui=True,
                                   use_gpu=False,
                                   minimum_updates=epochs)  # Epochs

    print("Completed training, trained model stored at:")
    print(train_model_path)

    # Persist the trained model record.
    model_trained = ModelTrained(author=author,
                                 name=model_name,
                                 description=desc,
                                 file=train_model_path)
    model_trained.save()

    # TODO(review): recipient below is a redacted placeholder — this mail can
    # never be delivered as-is; it should probably go to the author's address.
    send_report_mail("*****@*****.**",
                     title="Entrenamiento Listo",
                     html="",
                     file_paths=[train_model_path],
                     text="Tu modelo esta listo para que lo pruebes ")
def run_training(model_name, epochs, author, desc=""):
    """Train a dynamically loaded custom model and e-mail the author the result.

    On success the trained model is saved as a ModelTrained row and a report
    mail with the model file is sent; on any failure a failure mail carrying
    the exception detail is sent instead (boundary handler — errors are
    reported, not re-raised).

    Args:
        model_name: class name inside protAPI.proteinnet.custom_models
            (spaces are stripped before lookup).
        epochs: minimum update count; coerced to int (may arrive as a string).
        author: primary key of the User who requested the training.
        desc: optional free-text description.
    """
    epochs = int(epochs)
    author = User.objects.get(pk=author)

    # Pre-process data; don't overwrite previously generated files.
    process_raw_data(False, force_pre_processing_overwrite=False)
    model_name = model_name.replace(' ', '')

    # Run experiment — the sample file doubles as train and validation set.
    training_file = settings.BASE_DIR + "/protAPI/proteinnet/data/preprocessed/sample.txt.hdf5"
    validation_file = settings.BASE_DIR + "/protAPI/proteinnet/data/preprocessed/sample.txt.hdf5"

    try:
        # Resolve the model class by name from the custom_models module.
        dinamic_model = getattr(
            importlib.import_module("protAPI.proteinnet.custom_models"),
            model_name)

        model = dinamic_model(21, use_gpu=False)  # embed size = 21
        train_loader = contruct_dataloader_from_disk(training_file, 5)
        validation_loader = contruct_dataloader_from_disk(validation_file, 5)

        train_model_path = train_model(data_set_identifier="TRAINXX",
                                       model=model,
                                       train_loader=train_loader,
                                       validation_loader=validation_loader,
                                       learning_rate=0.1,
                                       minibatch_size=5,
                                       eval_interval=5,
                                       hide_ui=True,
                                       use_gpu=False,
                                       minimum_updates=epochs)  # Epochs

        print("Completed training, trained model stored at:")
        print(train_model_path)

        # Persist the trained model record for this author.
        model_trained = ModelTrained(author=author,
                                     name=model_name,
                                     description=desc,
                                     file=train_model_path)
        model_trained.save()

        print("Sending mail to", author.email)
        send_report_mail(author.email,
                         title="Entrenamiento Listo",
                         html="",
                         file_paths=[train_model_path],
                         text="Tu modelo esta listo para que lo pruebes ")
    except Exception as e:
        # Boundary handler: report the failure to the author by mail.
        print("Error en entrenamiento:", e)
        print("Sending mail to", author.email)
        send_report_mail(
            author.email,
            title="Entrenamiento Fallido",
            html=
            "Se detecto un error al intentar entrenar tu modelo: <h5 style='color:red'>"
            + str(e) + "</h5>",
            file_paths=[],
            # Include the error detail in the plain-text body too — it
            # previously ended at the colon with nothing after it.
            text="Se detecto un error al intentar entrenar tu modelo:\n" + str(e))