def retrieve_test_objects_to_pod(podobj, target_dir):
    """
    Downloads all the test objects to a given directory in a given pod.

    Args:
        podobj (OCS): The pod object to download the objects to
        target_dir (str): The fully qualified path of the download target folder

    Returns:
        list: A list of the downloaded objects' names

    """
    downloaded_objects = []
    # -p makes the mkdir idempotent so a pre-existing target dir doesn't fail
    podobj.exec_cmd_on_pod(command=f'mkdir -p {target_dir}')
    # The test-objects bucket is public, so anonymous boto3 access suffices
    public_s3 = boto3.resource('s3')
    # Fan the downloads out in parallel; the wget itself runs inside the pod
    with ThreadPoolExecutor() as p:
        for obj in public_s3.Bucket(constants.TEST_FILES_BUCKET).objects.all():
            logger.info(f'Downloading {obj.key} from AWS test bucket')
            p.submit(
                podobj.exec_cmd_on_pod,
                command=f'sh -c "'
                        f'wget -P {target_dir} '
                        f'https://{constants.TEST_FILES_BUCKET}.s3.amazonaws.com/{obj.key}"'
            )
            # NOTE(review): the key is recorded before the async download
            # finishes and without a success check — verify callers tolerate
            # a listed-but-missing object on wget failure
            downloaded_objects.append(obj.key)
    return downloaded_objects
def write_individual_s3_objects(mcg_obj, awscli_pod, bucket_factory, downloaded_files, target_dir, bucket_name=None):
    """
    Writes objects one by one to an s3 bucket

    Args:
        mcg_obj (obj): An MCG object containing the MCG S3 connection credentials
        awscli_pod (pod): A pod running the AWSCLI tools
        bucket_factory: Calling this fixture creates a new bucket(s)
        downloaded_files (list): List of downloaded object keys
        target_dir (str): The fully qualified path of the download target folder
        bucket_name (str): Name of the bucket (default: none)

    """
    # Create a fresh bucket only when the caller didn't supply one
    bucketname = bucket_name or bucket_factory(1)[0].name
    # fix: message had an f-prefix with no placeholders (F541)
    logger.info('Writing objects to bucket')
    for obj_name in downloaded_files:
        full_object_path = f"s3://{bucketname}/{obj_name}"
        # NOTE(review): assumes target_dir ends with '/' — confirm with callers
        copycommand = f"cp {target_dir}{obj_name} {full_object_path}"
        # 'Completed' in the CLI output is the success marker for `s3 cp`
        assert 'Completed' in awscli_pod.exec_cmd_on_pod(
            command=craft_s3_command(mcg_obj, copycommand),
            out_yaml_format=False,
            secrets=[
                mcg_obj.access_key_id, mcg_obj.access_key, mcg_obj.s3_endpoint
            ]
        )
def del_objects(uploaded_objects_paths, awscli_pod, mcg_obj):
    """
    Deleting objects from bucket

    Args:
        uploaded_objects_paths (list): List of object paths
        awscli_pod (pod): A pod running the AWSCLI tools
        mcg_obj (obj): An MCG object containing the MCG S3 connection credentials

    """
    # The credentials are loop-invariant, so build the secrets list once
    credentials = [mcg_obj.access_key_id, mcg_obj.access_key, mcg_obj.s3_endpoint]
    for object_path in uploaded_objects_paths:
        logger.info(f'Deleting object {object_path}')
        awscli_pod.exec_cmd_on_pod(
            command=craft_s3_command(mcg_obj, f"rm {object_path}"),
            secrets=credentials
        )
def retrieve_test_objects_to_pod(podobj, target_dir):
    """
    Downloads all the test objects to a given directory in a given pod.

    Args:
        podobj (OCS): The pod object to download the objects to
        target_dir (str): The fully qualified path of the download target folder

    Returns:
        list: A list of the downloaded objects' names

    """
    sync_object_directory(podobj, f's3://{constants.TEST_FILES_BUCKET}', target_dir)
    # fix: `ls -A1` emits one name per line, so the old split(' ') produced a
    # single newline-joined token; bare split() handles any whitespace and
    # drops empty strings
    downloaded_objects = podobj.exec_cmd_on_pod(f'ls -A1 {target_dir}').split()
    logger.info(f'Downloaded objects: {downloaded_objects}')
    return downloaded_objects
def sync_object_directory(podobj, src, target, mcg_obj=None):
    """
    Syncs objects between a target and source directories

    Args:
        podobj (OCS): The pod on which to execute the commands and download the objects to
        src (str): Fully qualified object source path
        target (str): Fully qualified object target path
        mcg_obj (MCG, optional): The MCG object to use in case the target or
            source are in an MCG

    """
    logger.info(f'Syncing all objects and directories from {src} to {target}')
    retrieve_cmd = f'sync {src} {target}'
    # Secrets are only needed (and available) when talking to an MCG endpoint
    if mcg_obj:
        secrets = [mcg_obj.access_key_id, mcg_obj.access_key, mcg_obj.s3_endpoint]
    else:
        secrets = None
    # fix: the original had a trailing `, 'Failed to sync objects'` which
    # silently built and discarded a tuple — the leftover of a removed assert.
    # exec_cmd_on_pod is expected to raise on command failure on its own.
    podobj.exec_cmd_on_pod(
        command=craft_s3_command(mcg_obj, retrieve_cmd),
        out_yaml_format=False,
        secrets=secrets
    )
def abort_all_multipart_upload(s3_obj, bucketname, object_key):
    """
    Abort all Multipart Uploads for this Bucket

    Args:
        s3_obj (obj): MCG or OBC object
        bucketname (str): Name of the bucket
        object_key (str): Unique object Identifier

    Returns:
        list: List of aborted upload ids, or None when the response carried
            no "Uploads" key

    """
    multipart_list = s3_obj.s3_client.list_multipart_uploads(Bucket=bucketname)
    # "Uploads" is absent from the response when there are no in-flight uploads
    uploads = multipart_list.get("Uploads")
    # fix: the old message logged len() of the whole response dict (its key
    # count, not the upload count) and was missing a space after "Aborting"
    logger.info(f"Aborting {len(uploads or [])} uploads")
    if uploads is not None:
        return [
            s3_obj.s3_client.abort_multipart_upload(
                Bucket=bucketname, Key=object_key, UploadId=upload["UploadId"]
            )
            for upload in uploads
        ]
    return None