def clean_executor_jobs(executor_id, executor_data):
    storage = None
    prefix = '/'.join([JOBS_PREFIX, executor_id])

    for file_data in executor_data:
        file_location = file_data['file_location']
        data = file_data['data']
        storage_config = data['storage_config']
        clean_cloudobjects = data['clean_cloudobjects']

        # Reuse a single Storage client for all entries of this executor
        if not storage:
            storage = Storage(storage_config=storage_config)

        logger.info(
            f'Cleaning jobs {", ".join([job_key for job_key in data["jobs_to_clean"]])}'
        )

        # Delete every object whose key belongs to one of the jobs to clean,
        # re-listing until no matching keys remain
        objects = storage.list_keys(storage.bucket, prefix)
        objects_to_delete = [
            key for key in objects
            if '-'.join(key.split('/')[1].split('-')[0:3]) in data['jobs_to_clean']
        ]

        while objects_to_delete:
            storage.delete_objects(storage.bucket, objects_to_delete)
            time.sleep(5)
            objects = storage.list_keys(storage.bucket, prefix)
            objects_to_delete = [
                key for key in objects
                if '-'.join(key.split('/')[1].split('-')[0:3]) in data['jobs_to_clean']
            ]

        # Optionally remove the temporary cloudobjects created by each job.
        # A separate variable is used so the jobs prefix is not overwritten
        # for the next iteration of the outer loop.
        if clean_cloudobjects:
            for job_key in data['jobs_to_clean']:
                temp_prefix = '/'.join([TEMP_PREFIX, job_key])
                clean_bucket(storage, storage.bucket, temp_prefix)

        # Remove the local metadata file once its jobs have been cleaned
        if os.path.exists(file_location):
            os.remove(file_location)

    logger.info('Finished')
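
# A minimal sketch of the payload shape this function expects, inferred from the
# field accesses above; the literal values are hypothetical placeholders:
#
#   executor_data = [{
#       'file_location': '/path/to/cleaner/metadata/file',   # hypothetical path
#       'data': {
#           'jobs_to_clean': {...},      # job keys whose objects must be removed
#           'storage_config': {...},     # config used to build the Storage client
#           'clean_cloudobjects': True   # also remove objects under TEMP_PREFIX
#       }
#   }]
#   clean_executor_jobs(executor_id, executor_data)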
def clean_functions(functions_data):
    file_location = functions_data['file_location']
    data = functions_data['data']
    executor_id = data['fn_to_clean']

    logger.info(f'Going to clean functions from {executor_id}')

    storage_config = data['storage_config']
    storage = Storage(storage_config=storage_config)

    # Delete every object stored under this executor's jobs prefix
    prefix = '/'.join([JOBS_PREFIX, executor_id]) + '/'
    key_list = storage.list_keys(storage.bucket, prefix)
    storage.delete_objects(storage.bucket, key_list)

    # Remove the local metadata file once the objects are gone
    if os.path.exists(file_location):
        os.remove(file_location)

    logger.info('Finished')
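
# A minimal sketch of the expected functions_data shape, inferred from the field
# accesses above; values are hypothetical placeholders:
#
#   functions_data = {
#       'file_location': '/path/to/cleaner/metadata/file',   # hypothetical path
#       'data': {
#           'fn_to_clean': executor_id,   # executor whose function data is removed
#           'storage_config': {...}       # config used to build the Storage client
#       }
#   }
#   clean_functions(functions_data)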
def delete_objects_by_prefix(storage: Storage, bucket: str, prefix: str):
    keys = storage.list_keys(bucket, prefix)
    storage.delete_objects(bucket, keys)
    logger.info(f'Removed {len(keys)} objects from {storage.backend}://{bucket}/{prefix}')
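
# Example usage (illustrative; the prefix shown is an assumption, built the same
# way as in clean_executor_jobs above):
#
#   storage = Storage(storage_config=storage_config)
#   delete_objects_by_prefix(storage, storage.bucket, '/'.join([TEMP_PREFIX, job_key]))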