def test_copy_file():
    """
    Test that copy_file() duplicates an S3 object into a target "directory"
    (key prefix) within the same bucket, then clean up all test objects.

    NOTE(review): the original wrapped the whole body in a broad
    ``except Exception: logger.error(e); assert False`` which hid the real
    traceback behind a bare assertion failure; the wrapper is removed so
    pytest reports the actual error with full context.
    """
    testdir = "test-copy"
    # TODO: how do I bootstrap this test?
    source_key = f"{testdir}/{TEST_OBJECT}"

    # seed a small source object to copy
    s3_client().put_object(
        Bucket=TEST_BUCKET,
        Key=source_key,
        Body=b'Test Object'
    )

    target_dir = f"{testdir}/target_dir"
    copy_file(
        source_key=source_key,
        target_dir=target_dir,
        bucket=TEST_BUCKET
    )

    # the copy should appear under the target prefix with the same file name
    target_key = f"{target_dir}/{TEST_OBJECT}"
    assert object_key_exists(object_key=target_key)

    # clean up: delete every object under the test prefix
    response = s3_client().list_objects_v2(Bucket=TEST_BUCKET, Prefix=testdir)
    for obj in response['Contents']:
        logger.debug(f"Deleting {obj['Key']}")
        s3_client().delete_object(Bucket=TEST_BUCKET, Key=obj['Key'])

    assert not object_key_exists(object_key=testdir)
def upload_test_file(
        test_bucket=TEST_BUCKET,
        test_kg=TEST_KG_ID,
        test_file_path=TEST_SMALL_FILE_PATH,
        test_subfolder=''
):
    """
    Upload a local test file into the versioned object location of a
    knowledge graph in S3, skipping the upload when the key already exists.

    :param test_bucket: target S3 bucket name
    :param test_kg: knowledge graph identifier used to derive the object location
    :param test_file_path: path of the local file to upload
    :param test_subfolder: optional subfolder appended to the content location
    :return: the S3 object key of the (possibly pre-existing) uploaded file
    """
    # NOTE: file must be read in binary mode!
    logger.debug(f"upload_test_file(): '{test_file_path}' in '{test_kg}' of '{test_bucket}'")
    with open(test_file_path, 'rb') as test_file:
        content_location, _ = with_version(get_object_location)(test_kg)
        content_location = f"{content_location}{test_subfolder}"
        object_key = get_object_key(content_location, test_file.name)

        # only create the object key if it doesn't already exist?
        already_present = object_key_exists(
            object_key=object_key,
            bucket_name=test_bucket
        )
        if not already_present:
            upload_file(
                bucket=test_bucket,
                object_key=object_key,
                source=test_file
            )

        assert object_key_exists(object_key=object_key, bucket_name=test_bucket)
        return object_key
def test_upload_file(bucket_name: str = TEST_BUCKET, client=s3_client):
    """
    Test for uploading of file to S3.

    Exercises upload_file() twice: once with a file path as the source,
    once with an already-open binary file object.

    :param client:
    :param bucket_name:
    :return:
    """
    def _upload_assert_and_clean(source):
        # one upload round-trip: push the source, confirm the key, remove it
        upload_file(
            bucket_name=bucket_name,
            source_file=source,
            target_object_key=TEST_SMALL_FILE_KEY,
            client=client
        )
        # successful upload?
        assert object_key_exists(object_key=TEST_SMALL_FILE_KEY)
        # clean up after test
        delete_object(
            bucket_name=bucket_name,
            target_object_key=TEST_SMALL_FILE_KEY,
            client=client
        )

    # first pass: source given as a file path
    _upload_assert_and_clean(TEST_SMALL_FILE_PATH)

    # second pass: source given as an open binary file descriptor
    with open(TEST_SMALL_FILE_PATH, mode='rb') as test_fd:
        _upload_assert_and_clean(test_fd)
def test_s3_local_copy_to_new_key_in_same_bucket():
    """
    Test that copy() duplicates an existing S3 object to a new key
    within the same bucket, then remove both test objects
    (unless KEEP_TEST_FILES is set).
    """
    original_key = upload_test_file()
    assert object_key_exists(object_key=original_key)

    # copy under a sibling key derived from the source key
    duplicate_key = f"{original_key}_copy"
    copy(source_key=original_key, target_key=duplicate_key)
    assert object_key_exists(object_key=duplicate_key)

    if not KEEP_TEST_FILES:
        for key in (original_key, duplicate_key):
            delete_test_file(key)
def test_s3_unix_remote_copy_to_new_key_in_different_bucket_and_account():
    """
    Test remote_copy() of a local test object into a bucket owned by a
    different account, using an assumed role for the target, then verify
    the copy and clean up (unless KEEP_TEST_FILES is set).

    NOTE(review): the original entry/exit log messages named a different
    function ("test_s3_local_copy_to_new_key_in_different_bucket_and_account()"),
    which would mislead anyone tracing test logs; they now match this
    function's actual name.
    """
    logger.debug(
        "Entering test_s3_unix_remote_copy_to_new_key_in_different_bucket_and_account()"
    )
    target_assumed_role, target_client, target_bucket = get_remote_client()

    logger.debug("upload test file")
    src_test_key = upload_test_file()

    logger.debug("start remote copy")
    # Expect the local 'src_test_key' resource
    # to be copied into the default remote bucket?
    remote_copy(
        source_key=src_test_key,
        target_key=src_test_key,
        source_bucket=TEST_BUCKET,
        target_bucket=target_bucket,
        target_client=target_client
    )

    logger.debug(
        f"list contents of remote_copy() target bucket '{target_bucket}'")
    list_files(bucket_name=target_bucket, client=target_client)

    logger.debug(
        "end remote copy - check existence of source key in target bucket (with target assumed role)"
    )
    assert object_key_exists(
        object_key=src_test_key,
        bucket_name=target_bucket,
        assumed_role=target_assumed_role
    )

    if not KEEP_TEST_FILES:
        logger.debug(f"Deleting '{src_test_key}' in '{target_bucket}'")
        delete_test_file(
            test_object_key=src_test_key,
            test_bucket=target_bucket,
            test_client=target_client
        )

    logger.debug(
        "Exiting test_s3_unix_remote_copy_to_new_key_in_different_bucket_and_account()"
    )
def test_download_file(bucket_name: str = TEST_BUCKET, client=s3_client):
    """
    Test for downloading of a S3 object to a target file.

    Exercises download_file() twice: once with a target file name,
    once with an already-open binary file object.

    :param client:
    :param bucket_name:
    :return:
    """
    # ensure that a test file exists for downloading
    src_test_key = upload_test_file()
    assert object_key_exists(object_key=src_test_key)

    def _download_assert_and_clean(target):
        # one download round-trip: fetch the object, confirm the local file, remove it
        download_file(
            bucket_name=bucket_name,
            source_object_key=src_test_key,
            target_file=target,
            client=client
        )
        # successful download?
        assert isfile(TEST_SMALL_FILE)
        # Clean up test file
        remove(TEST_SMALL_FILE)

    # Test first downloading to a file of given name
    _download_assert_and_clean(TEST_SMALL_FILE)

    # Test next downloading the test object to write to a open file object of a given name
    with open(TEST_SMALL_FILE, 'wb') as test_fd:
        _download_assert_and_clean(test_fd)

    # Clean up test object in S3
    delete_object(
        bucket_name=bucket_name,
        target_object_key=src_test_key,
        client=client
    )