def existing_ec2_instance_connection(request, ec2_key_file_name, ec2_user_name, ec2_public_ip):
    """
    Fixture to establish connection with an existing EC2 instance and stage
    the container tests on it.

    Uploads the test suite to a uniquely named S3 folder, copies it onto the
    instance under ``$HOME/container_tests``, and registers a finalizer that
    deletes the S3 copy at teardown.

    :param request: pytest test request
    :param ec2_key_file_name: ec2 key file name
    :param ec2_user_name: username of the ec2 instance to login
    :param ec2_public_ip: public ip address of the instance
    :return: Fabric connection object
    """
    conn = Connection(
        user=ec2_user_name,
        host=ec2_public_ip,
        connect_kwargs={"key_filename": [ec2_key_file_name]},
    )
    # Seed with the current timestamp so concurrent sessions get distinct
    # artifact folder suffixes. (No f-string needed: strftime returns a str.)
    random.seed(datetime.datetime.now().strftime("%Y%m%d%H%M%S%f"))
    unique_id = random.randint(1, 100000)
    # Derive a short key name from the IP's first dotted component.
    ec2_key_name = ec2_public_ip.split(".")[0]
    artifact_folder = f"{ec2_key_name}-{unique_id}-folder"
    s3_test_artifact_location = test_utils.upload_tests_to_s3(artifact_folder)

    def delete_s3_artifact_copy():
        """Remove the uploaded test artifacts from S3 at teardown."""
        test_utils.delete_uploaded_tests_from_s3(s3_test_artifact_location)

    request.addfinalizer(delete_s3_artifact_copy)

    conn.run(
        f"aws s3 cp --recursive {test_utils.TEST_TRANSFER_S3_BUCKET}/{artifact_folder} $HOME/container_tests"
    )
    conn.run("mkdir -p $HOME/container_tests/logs && chmod -R +x $HOME/container_tests/*")
    return conn
def ec2_connection(request, ec2_instance, ec2_key_name, region):
    """
    Fixture to establish connection with EC2 instance if necessary.

    Uploads the test suite to an S3 folder keyed on ``ec2_key_name``, copies
    it onto the instance under ``$HOME/container_tests``, and registers a
    finalizer that deletes the S3 copy at teardown.

    :param request: pytest test request
    :param ec2_instance: ec2_instance pytest fixture
    :param ec2_key_name: unique key name
    :param region: Region where ec2 instance is launched
    :return: Fabric connection object
    """
    instance_id, instance_pem_file = ec2_instance
    # Look up the public IP once and reuse it for both logging and the
    # connection host (previously this issued two describe calls).
    ip_address = ec2_utils.get_public_ip(instance_id, region)
    LOGGER.info(f"Instance ip_address: {ip_address}")
    user = ec2_utils.get_instance_user(instance_id, region=region)
    conn = Connection(
        user=user,
        host=ip_address,
        connect_kwargs={"key_filename": [instance_pem_file]},
    )
    artifact_folder = f"{ec2_key_name}-folder"
    s3_test_artifact_location = test_utils.upload_tests_to_s3(artifact_folder)

    def delete_s3_artifact_copy():
        """Remove the uploaded test artifacts from S3 at teardown."""
        test_utils.delete_uploaded_tests_from_s3(s3_test_artifact_location)

    request.addfinalizer(delete_s3_artifact_copy)

    # Reuse artifact_folder rather than re-building "{ec2_key_name}-folder".
    conn.run(
        f"aws s3 cp --recursive {test_utils.TEST_TRANSFER_S3_BUCKET}/{artifact_folder} $HOME/container_tests"
    )
    conn.run("mkdir -p $HOME/container_tests/logs && chmod -R +x $HOME/container_tests/*")
    return conn
def training_cmd(request, ecs_cluster_name, training_script):
    """
    Build the ECS training command for a test run.

    Uploads the test suite to an S3 folder keyed on the cluster name and
    registers a finalizer that removes the S3 copy at teardown.

    :param request: pytest test request
    :param ecs_cluster_name: name of the ECS cluster, used to key the S3 folder
    :param training_script: training script to run inside the container
    :return: ECS training command string/structure from ecs_utils
    """
    folder_name = f"{ecs_cluster_name}-folder"
    artifact_location = test_utils.upload_tests_to_s3(folder_name)

    def _cleanup_s3_artifacts():
        """Delete the uploaded test artifacts from S3 at teardown."""
        test_utils.delete_uploaded_tests_from_s3(artifact_location)

    request.addfinalizer(_cleanup_s3_artifacts)
    return ecs_utils.build_ecs_training_command(artifact_location, training_script)
def ec2_connection(request, ec2_instance, ec2_key_name, ec2_instance_type, region):
    """
    Fixture to establish connection with EC2 instance if necessary.

    Uploads the test suite to a uniquely named S3 folder, copies it onto the
    instance under ``$HOME/container_tests``, registers a finalizer that
    deletes the S3 copy at teardown, and — in canary context — logs the
    connection into the public ECR registry.

    :param request: pytest test request
    :param ec2_instance: ec2_instance pytest fixture
    :param ec2_key_name: unique key name
    :param ec2_instance_type: ec2_instance_type pytest fixture
    :param region: Region where ec2 instance is launched
    :return: Fabric connection object
    """
    instance_id, instance_pem_file = ec2_instance
    # p3dn.24xlarge capacity is only available in a dedicated region.
    region = P3DN_REGION if ec2_instance_type == "p3dn.24xlarge" else region
    ip_address = ec2_utils.get_public_ip(instance_id, region=region)
    LOGGER.info(f"Instance ip_address: {ip_address}")
    user = ec2_utils.get_instance_user(instance_id, region=region)
    LOGGER.info(f"Connecting to {user}@{ip_address}")
    conn = Connection(
        user=user,
        host=ip_address,
        connect_kwargs={"key_filename": [instance_pem_file]},
        connect_timeout=18000,
    )
    # Seed with the current timestamp so concurrent sessions get distinct
    # artifact folder suffixes. (No f-string needed: strftime returns a str.)
    random.seed(datetime.datetime.now().strftime("%Y%m%d%H%M%S%f"))
    unique_id = random.randint(1, 100000)
    artifact_folder = f"{ec2_key_name}-{unique_id}-folder"
    s3_test_artifact_location = test_utils.upload_tests_to_s3(artifact_folder)

    def delete_s3_artifact_copy():
        """Remove the uploaded test artifacts from S3 at teardown."""
        test_utils.delete_uploaded_tests_from_s3(s3_test_artifact_location)

    request.addfinalizer(delete_s3_artifact_copy)

    conn.run(
        f"aws s3 cp --recursive {test_utils.TEST_TRANSFER_S3_BUCKET}/{artifact_folder} $HOME/container_tests"
    )
    conn.run("mkdir -p $HOME/container_tests/logs && chmod -R +x $HOME/container_tests/*")

    # Log into ECR if we are in canary context
    if test_utils.is_canary_context():
        public_registry = test_utils.PUBLIC_DLC_REGISTRY
        test_utils.login_to_ecr_registry(conn, public_registry, region)

    return conn