import os
import pickle
import logging

# Storage, clean_bucket and the CLEANER_* / JOBS_PREFIX / TEMP_PREFIX
# constants are assumed to be provided elsewhere in this package.

logger = logging.getLogger(__name__)


def clean_cloudobjects(cloudobjects_data):
    file_location = cloudobjects_data['file_location']
    data = cloudobjects_data['data']

    logger.info('Going to clean cloudobjects')
    cos_to_clean = data['cos_to_clean']
    storage_config = data['storage_config']
    storage = Storage(storage_config=storage_config)

    for co in cos_to_clean:
        # Only delete objects that live in this storage instance's backend
        if co.backend == storage.backend:
            logger.info('Cleaning {}://{}/{}'.format(co.backend, co.bucket, co.key))
            storage.delete_object(co.bucket, co.key)

    # Remove the task file once its cloudobjects have been deleted
    if os.path.exists(file_location):
        os.remove(file_location)

    logger.info('Finished')
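# Illustrative shape of the payload clean_cloudobjects() expects, inferred
# from the field accesses above; the concrete values are hypothetical:
#
#   cloudobjects_data = {
#       'file_location': '/tmp/cleaner/task-xyz',  # hypothetical task-file path
#       'data': {
#           'cos_to_clean': [...],       # cloud-object records with .backend/.bucket/.key
#           'storage_config': {...},     # backend/bucket configuration for Storage()
#       },
#   }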
def clean_file(file_name):
    file_location = os.path.join(CLEANER_DIR, file_name)

    # Never delete the cleaner's own log or PID file
    if file_location in [CLEANER_LOG_FILE, CLEANER_PID_FILE]:
        return

    with open(file_location, 'rb') as pk:
        data = pickle.load(pk)

    if 'jobs_to_clean' in data:
        jobs_to_clean = data['jobs_to_clean']
        storage_config = data['storage_config']
        # Local name chosen to avoid shadowing the clean_cloudobjects()
        # function defined above; the pickled key is unchanged
        delete_cloudobjects = data['clean_cloudobjects']
        storage = Storage(storage_config=storage_config)

        for job_key in jobs_to_clean:
            logger.info('Going to clean: {}'.format(job_key))

            prefix = '/'.join([JOBS_PREFIX, job_key])
            clean_bucket(storage, storage.bucket, prefix)

            if delete_cloudobjects:
                prefix = '/'.join([TEMP_PREFIX, job_key])
                clean_bucket(storage, storage.bucket, prefix)

    if 'cos_to_clean' in data:
        logger.info('Going to clean cloudobjects')
        cos_to_clean = data['cos_to_clean']
        storage_config = data['storage_config']
        storage = Storage(storage_config=storage_config)

        for co in cos_to_clean:
            # Only delete objects that live in this storage instance's backend
            if co.backend == storage.backend:
                logger.info('Cleaning {}://{}/{}'.format(co.backend, co.bucket, co.key))
                storage.delete_object(co.bucket, co.key)

    # Remove the processed task file
    if os.path.exists(file_location):
        os.remove(file_location)
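# A minimal sketch of how clean_file() might be driven. This loop is an
# illustration, not the module's actual entry point: the os.listdir()-based
# scan and the run_cleaner() name are assumptions for the example.
def run_cleaner():
    # Process every pending cleanup task dropped into CLEANER_DIR;
    # clean_file() itself skips the cleaner's own log and PID files.
    for file_name in os.listdir(CLEANER_DIR):
        try:
            clean_file(file_name)
        except Exception as e:
            # A corrupt or in-flight task file should not stop the sweep
            logger.error('Error cleaning {}: {}'.format(file_name, e))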