import os
import shutil
from datetime import datetime


def main():
    print('Fetching training configuration')
    train_config = fetch_json(TRAIN_CONFIG)

    # Train model
    if train_config['task_type'] == 'classification':
        print('Starting classification')
        (accuracy, classes, model_path,
         acc_plot_path, remove_paths) = train_classification(train_config)
        metadata = {'classes': classes}
    else:
        print('Starting sentiment analysis')
        (accuracy, model_path, metadata_path,
         acc_plot_path, remove_paths) = train_sa(train_config)
        metadata = {'metadata_filename': metadata_path}

    # Deploy model
    print('Deploying model')
    setup_inference(
        train_config['token'],
        train_config['task_type'],
        accuracy,
        model_path,
        acc_plot_path,
        metadata,
    )

    # Clear files
    for remove_path in remove_paths:
        shutil.rmtree(remove_path)

    # Delete training config from S3
    # This will also shutdown the instance
    delete_object(TRAIN_CONFIG)
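# A note on the shape main() assumes for the training config: only the
# 'task_type' and 'token' keys are read here; everything else in the dict
# is passed through to train_classification() / train_sa(). A hypothetical
# example (values illustrative, other fields omitted):
#
#   {
#       "token": "a1b2c3",
#       "task_type": "classification",   # or "sentimentanalysis"
#       ...
#   }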
def main(username):
    print('In server training')
    os.makedirs(os.path.join(DATA_PATH, 'checkpoints'))
    print('Created /data/checkpoints folders')

    # Download user file
    userdata_filename = os.path.join(DATA_PATH, f'{username}.json')
    download_file(
        os.path.join(TRAINING_CONFIG, f'{username}.json'),
        userdata_filename,
    )
    (task, username, model_name, ratio, is_reducelrscheduler, patience,
     factor, min_lr, optimizer, batch_size, learning_rate, epochs,
     dataset_filename) = get_config_data(userdata_filename)

    # Download dataset
    download_file(
        os.path.join(TRAINING_CONFIG, dataset_filename),
        os.path.join(DATA_PATH, dataset_filename),
    )
    print('Completed fetching data from s3')

    inference_data = {}
    if task == 'image':
        inference_data = train_image_classification(
            username, model_name, ratio, is_reducelrscheduler, patience,
            factor, min_lr, optimizer, batch_size, learning_rate, epochs,
            dataset_filename)
    elif task == 'text':
        inference_data = train_sentiment_analysis(
            username, model_name, ratio, is_reducelrscheduler, patience,
            factor, min_lr, optimizer, batch_size, learning_rate, epochs,
            dataset_filename)

    # Upload data to S3
    upload_model_data(task, username)
    print('Uploaded inference data to s3')

    # Update inference json
    inference_config = fetch_json(INFERENCE_CONFIG)
    inference_config[username] = inference_data
    inference_config[username]['created'] = datetime.now().strftime(
        '%d-%m-%y %H:%M')
    put_object(INFERENCE_CONFIG, inference_config)
    print('Added user information to inference.json and uploaded to s3')

    # Delete train data from S3
    delete_object(os.path.join(TRAINING_CONFIG, dataset_filename))
    delete_object(os.path.join(TRAINING_CONFIG, f'{username}.json'))
    print('Deleted user data from training folder in s3')

    # Delete data
    shutil.rmtree(DATA_PATH)
    print('Deleted data folder')
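# get_config_data() is not shown in this section. A minimal sketch, assuming
# the downloaded user JSON stores one key per field returned below (the key
# names themselves are assumptions, only the field order is from the caller):
import json


def get_config_data(userdata_filename):
    """Read the downloaded user config and return its fields as a tuple."""
    with open(userdata_filename) as f:
        config = json.load(f)
    return (config['task'], config['username'], config['model_name'],
            config['ratio'], config['is_reducelrscheduler'],
            config['patience'], config['factor'], config['min_lr'],
            config['optimizer'], config['batch_size'],
            config['learning_rate'], config['epochs'],
            config['dataset_filename'])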
def setup_inference(token, task_type, accuracy, model_path, acc_plot_path,
                    metadata):
    inference_config = fetch_json(INFERENCE_CONFIG)

    # Upload model
    s3_model_path = os.path.join(task_type, os.path.basename(model_path))
    upload_file(model_path, s3_model_path)

    if task_type == 'sentimentanalysis':
        s3_meta_path = os.path.join(
            task_type, os.path.basename(metadata['metadata_filename']))
        upload_file(metadata['metadata_filename'], s3_meta_path)
        metadata['metadata_filename'] = s3_meta_path

    # Upload new inference config to S3
    inference_config[token] = {
        'task_type': task_type,
        'model_filename': s3_model_path,
        **metadata,
        'accuracy': accuracy,
        'accuracy_plot': image_to_base64(acc_plot_path),
        'created': datetime.now().strftime('%d-%m-%y %H:%M'),
    }
    put_object(INFERENCE_CONFIG, inference_config)
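# The S3 helpers used throughout (fetch_json, put_object, upload_file,
# download_file, delete_object) and image_to_base64 are not shown in this
# section. A minimal sketch with boto3, assuming all keys live under a single
# bucket (the BUCKET_NAME constant is hypothetical, and error handling is
# omitted):
import base64
import json

import boto3

s3 = boto3.client('s3')
BUCKET_NAME = 'my-training-bucket'  # hypothetical


def fetch_json(key):
    """Download a JSON object from S3 and parse it into a dict."""
    body = s3.get_object(Bucket=BUCKET_NAME, Key=key)['Body'].read()
    return json.loads(body)


def put_object(key, data):
    """Serialize a dict to JSON and write it back to S3."""
    s3.put_object(Bucket=BUCKET_NAME, Key=key, Body=json.dumps(data))


def upload_file(local_path, key):
    s3.upload_file(local_path, BUCKET_NAME, key)


def download_file(key, local_path):
    s3.download_file(BUCKET_NAME, key, local_path)


def delete_object(key):
    s3.delete_object(Bucket=BUCKET_NAME, Key=key)


def image_to_base64(image_path):
    """Encode an image file as a base64 string for embedding in JSON."""
    with open(image_path, 'rb') as f:
        return base64.b64encode(f.read()).decode('utf-8')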
# Entry point for the per-user variant of main()
if __name__ == '__main__':
    server_config = fetch_json(STATUS_CONFIG)
    if not server_config['dev_mode']:
        main(server_config['username'])
# Entry point for the zero-argument variant of main()
if __name__ == '__main__':
    if not fetch_json(STATUS_CONFIG)['dev_mode']:
        main()