# For each submitted API task: check status, warn on failed shards, locate the
# detections output file, persist the task's missing-image list to disk, and —
# when too many images are missing — build names for a resubmission blob.
# NOTE(review): this line was whitespace-mangled (collapsed to one line);
# reconstructed indentation below follows the statement order of the original.
for task in tasks:
    response = task.check_status()
    n_failed_shards = response['Status']['message']['num_failed_shards']
    if n_failed_shards != 0:
        print(f'Warning: {n_failed_shards} failed shards for task '
              f'{task.id}')

    output_file_urls = response['Status']['message']['output_file_urls']
    detections_url = output_file_urls['detections']
    fn = url_to_filename(detections_url)

    # Each taskgroup corresponds to one of our folders; the output filename
    # should embed either the raw or the cleaned folder name.
    folder_name = folder_names[i_taskgroup]
    clean_folder_name = prepare_api_submission.clean_request_name(
        folder_name)
    assert (folder_name in fn) or (clean_folder_name in fn)
    assert 'chunk' in fn

    # Write this task's missing-image list next to the raw API output.
    missing_images_fn = os.path.join(
        raw_api_output_folder, fn.replace('.json', '_missing.json'))
    missing_imgs = task.get_missing_images(verbose=True)
    ai4e_azure_utils.write_list_to_file(missing_images_fn, missing_imgs)

    num_missing_imgs = len(missing_imgs)
    if num_missing_imgs < max_tolerable_missing_images:
        continue

    # BUG FIX: report the *count* of missing images, not the list object
    # itself (the original interpolated {missing_imgs}).
    print(f'Warning: {num_missing_imgs} missing images for task {task.id}')

    # Names for the resubmission payload for this task's missing images.
    task_name = f'{base_task_name}_{folder_name}_{task.id}_missing_images'
    blob_name = f'api_inputs/{base_task_name}/{task_name}.json'
# NOTE(review): whitespace-mangled fragment — this line begins mid-statement
# (a dangling close-paren after response['status']...), so the opening of the
# first statement was lost and the code cannot be safely reconstructed here.
# Apparent intent (to be confirmed against version history): read
# num_failed_shards from the status response, warn if nonzero, derive the
# detections output filename, then call
# prepare_api_submission.generate_resubmission_list(...) and warn when the
# missing-image count reaches max_tolerable_missing_images. Appears to be an
# older revision of the loop on the first line (dict key 'status' here vs
# 'Status' there — presumably an API casing change; verify which is current).
response['status']['message']['num_failed_shards']) # assert n_failed_shards == 0 if n_failed_shards != 0: print('Warning: {} failed shards for task {}'.format( n_failed_shards, task_id)) output_file_urls = prepare_api_submission.get_output_file_urls( response) detections_url = output_file_urls['detections'] fn = url_to_filename(detections_url) # Each task group corresponds to one of our folders assert (folder_names[i_task_group] in fn) or \ (prepare_api_submission.clean_request_name(folder_names[i_task_group]) in fn) assert 'chunk' in fn missing_images_fn = fn.replace('.json', '_missing.json') missing_images_fn = os.path.join(raw_api_output_folder, missing_images_fn) missing_images,non_images = \ prepare_api_submission.generate_resubmission_list( task_status_endpoint_url,task_id,missing_images_fn) if len(missing_images) < max_tolerable_missing_images: continue print('Warning: {} missing images for task {}'.format( len(missing_images), task_id))
# NOTE(review): whitespace-mangled fragment — this line begins mid-call
# (arguments `task.id, i_task, i_taskgroup))` with the callee lost), so it
# cannot be safely reconstructed here. Apparent intent (to be confirmed):
# another revision of the task-status loop that additionally sanitizes ':'
# and '.' out of filenames/folder names, loads the originally submitted image
# list from task.local_images_list_path, passes it to
# task.get_missing_images(submitted_images=..., verbose=True), and collects
# the result into missing_images_by_task instead of printing a warning.
task.id, i_task, i_taskgroup)) response = task.check_status() n_failed_shards = response['Status']['message']['num_failed_shards'] if n_failed_shards != 0: print('Warning: {} failed shards for task {}'.format( n_failed_shards, task.id)) output_file_urls = task.get_output_file_urls() detections_url = output_file_urls['detections'] detections_fn = url_to_filename(detections_url).replace(':', '_') # Each taskgroup corresponds to one of our folders folder_name = folder_names[i_taskgroup] clean_folder_name = prepare_api_submission.clean_request_name( folder_name.replace('.', '-')) assert (folder_name in detections_fn) or (clean_folder_name in detections_fn) assert 'chunk' in detections_fn # Check that we have (almost) all the images list_file = task.local_images_list_path with open(list_file, 'r') as f: submitted_images = json.load(f) missing_images_fn = os.path.join( raw_api_output_folder, detections_fn.replace('.json', '_missing.json')) missing_images = task.get_missing_images( submitted_images=submitted_images, verbose=True) missing_images_by_task.append(missing_images)