def create_raw_upload_image_session(
    images: List[str], delete_file=False, imageset=None, annotationset=None
) -> Tuple[RawImageUploadSession, Dict[str, RawImageFile]]:
    """Stage each named resource file and wrap it in a ``RawImageFile``
    attached to a fresh upload session.

    Returns the (saved) session and a mapping of staged filename to the
    created ``RawImageFile``. When ``delete_file`` is set, the staged
    payload for the ``image10x10x10.zraw`` fixture is removed again so
    callers can exercise the missing-file error path.
    """
    session = RawImageUploadSession(
        imageset=imageset, annotationset=annotationset
    )
    name_to_raw_file = {}
    for filename in images:
        staged = create_file_from_filepath(RESOURCE_PATH / filename)
        raw_file = RawImageFile.objects.create(
            upload_session=session,
            filename=staged.name,
            staged_file_id=staged.uuid,
        )
        name_to_raw_file[staged.name] = raw_file
    if delete_file:
        # Drop the staged bytes for this specific fixture only; the
        # RawImageFile row itself stays behind.
        StagedAjaxFile(
            name_to_raw_file["image10x10x10.zraw"].staged_file_id
        ).delete()
    session.save()
    return session, name_to_raw_file
def _handle_raw_files(
    *,
    consumed_files: Set[Path],
    file_errors: Dict[Path, List[str]],
    base_directory: Path,
    upload_session: RawImageUploadSession,
):
    """Record the import outcome on *upload_session*.

    Stores paths relative to *base_directory* in ``import_result``;
    errors for files that were ultimately consumed are dropped. If any
    errors remain, sets ``error_message`` and — when the session has a
    creator — sends an import-status notification.
    """
    consumed = [str(f.relative_to(base_directory)) for f in consumed_files]
    errors = {}
    for path, messages in file_errors.items():
        # Errors recorded for files that a builder later consumed are
        # stale — skip them.
        if path in consumed_files:
            continue
        errors[str(path.relative_to(base_directory))] = messages
    upload_session.import_result = {
        "consumed_files": consumed,
        "file_errors": errors,
    }
    if errors:
        n_errors = len(errors)
        upload_session.error_message = (
            f"{n_errors} file{pluralize(n_errors)} could not be imported"
        )
        if upload_session.creator:
            Notification.send(
                type=NotificationType.NotificationTypeChoices.
                IMAGE_IMPORT_STATUS,
                message=f"failed with {n_errors} error{pluralize(n_errors)}",
                action_object=upload_session,
            )
def create_raw_upload_image_session(
    images: List[str], delete_file=False, imageset=None, annotationset=None
) -> Tuple[RawImageUploadSession, Dict[str, RawImageFile]]:
    """Build an upload session over the given resource files.

    Each entry of *images* is staged from ``RESOURCE_PATH`` and turned
    into a ``RawImageFile`` on the session. Returns the saved session
    together with a ``{staged name: RawImageFile}`` dict. With
    ``delete_file=True`` the staged payload of the
    ``image10x10x10.zraw`` fixture is deleted to simulate a lost file.
    """
    session = RawImageUploadSession(
        imageset=imageset,
        annotationset=annotationset,
    )
    by_name: Dict[str, RawImageFile] = {}
    for entry in images:
        staged = create_file_from_filepath(RESOURCE_PATH / entry)
        by_name[staged.name] = RawImageFile.objects.create(
            upload_session=session,
            filename=staged.name,
            staged_file_id=staged.uuid,
        )
    if delete_file:
        # Remove only the staged bytes; the database row remains.
        target = by_name["image10x10x10.zraw"].staged_file_id
        StagedAjaxFile(target).delete()
    session.save()
    return session, by_name
def _handle_raw_files(
    *,
    input_files: Set[Path],
    consumed_files: Set[Path],
    filepath_lookup: Dict[str, RawImageFile],
    file_errors: Dict[Path, List[str]],
    upload_session: RawImageUploadSession,
):
    """Persist per-file import outcomes on the ``RawImageFile`` rows.

    Consumed files are flagged ``consumed=True`` with their error
    cleared; files no builder accepted get the collected builder errors
    written into ``error``. When anything failed, the session's
    ``error_message`` is set and — if the creator has an email address —
    a failure mail is sent.
    """
    failed = input_files - consumed_files
    for path in consumed_files:
        entry = filepath_lookup[str(path)]
        entry.error = None
        entry.consumed = True
        entry.save()
    for path in failed:
        entry = filepath_lookup[str(path)]
        # Concatenate every builder's complaint for this file.
        details = "\n".join(file_errors[path])
        entry.error = (
            f"File could not be processed by any image builder:\n\n{details}")
        entry.save()
    if failed:
        n_failed = len(failed)
        upload_session.error_message = (
            f"{n_failed} file(s) could not be imported")
        if upload_session.creator and upload_session.creator.email:
            send_failed_file_import(n_failed, upload_session)
def queue_build_image_job(
    instance: RawImageUploadSession = None, created: bool = False, *_, **__
):
    """Post-save signal handler: queue the image-build task for a newly
    created upload session.

    Marks the session as queued (bypassing ``save()`` via a queryset
    ``update`` so this handler is not re-triggered) and schedules the
    ``build_images`` celery task keyed by the session pk. If scheduling
    fails, the session is stopped with the error recorded, and the
    exception is re-raised for the caller.
    """
    # Only act on the initial INSERT, never on later saves.
    if not created:
        return
    try:
        RawImageUploadSession.objects.filter(pk=instance.pk).update(
            session_state=UPLOAD_SESSION_STATE.queued,
            processing_task=instance.pk,
        )
        build_images.apply_async(
            task_id=str(instance.pk),
            args=(instance.pk,),
        )
    except Exception as e:
        instance.session_state = UPLOAD_SESSION_STATE.stopped
        instance.error_message = f"Could not start job: {e}"
        instance.save()
        # Bare `raise` re-raises the active exception with its original
        # traceback; `raise e` would splice this frame into it.
        raise
def create_raw_upload_image_session(
    *,
    images: List[str],
    delete_file=False,
    user=None,
    linked_task=None,
) -> Tuple[RawImageUploadSession, Dict[str, RawImageFile]]:
    """Create an upload session for the given resource files and run the
    import.

    A creator is required, so a ``UserFactory`` user is generated when
    *user* is not supplied. Each image is staged and attached as a
    ``RawImageFile``; with ``delete_file=True`` the staged payload of
    ``image10x10x10.zraw`` is removed to exercise the error path.
    ``process_images`` is invoked inside
    ``capture_on_commit_callbacks(execute=True)`` so on-commit hooks run
    immediately. Returns the session and a name→RawImageFile mapping.
    """
    session = RawImageUploadSession(
        creator=user or UserFactory(email="*****@*****.**")
    )
    by_name: Dict[str, RawImageFile] = {}
    for entry in images:
        staged = create_file_from_filepath(RESOURCE_PATH / entry)
        by_name[staged.name] = RawImageFile.objects.create(
            upload_session=session,
            filename=staged.name,
            staged_file_id=staged.uuid,
        )
    if delete_file:
        # Drop the staged bytes for this fixture; its DB row remains.
        StagedAjaxFile(
            by_name["image10x10x10.zraw"].staged_file_id
        ).delete()
    session.save()
    with capture_on_commit_callbacks(execute=True):
        session.process_images(linked_task=linked_task)
    return session, by_name
def _copy_output_files(self, *, container, base_dir: Path):
    """Import every file the container wrote under *base_dir* as raw
    images attached to a new ``Result`` for this job.

    Each file is pulled out of the container, staged as a
    ``StagedFile``/``StagedAjaxFile`` pair, and wrapped in a
    ``RawImageFile`` on a new upload session; processing is then kicked
    off explicitly. Logs a warning and returns early when listing fails
    or no output files exist.
    """
    # `find` lists all regular files below base_dir; exec_run returns an
    # object carrying exit_code and raw stdout bytes.
    found_files = container.exec_run(f"find {base_dir} -type f")
    if found_files.exit_code != 0:
        logger.warning(f"Error listing {base_dir}")
        return
    output_files = [
        base_dir / Path(f)
        for f in found_files.output.decode().splitlines()
    ]
    if not output_files:
        logger.warning("Output directory is empty")
        return
    # TODO: This thing should not interact with the database
    result = Result.objects.create(job_id=self._job_id)
    # Create the upload session but do not save it until we have the
    # files
    upload_session = RawImageUploadSession(algorithm_result=result)
    images = []
    for file in output_files:
        new_uuid = uuid.uuid4()
        django_file = File(get_file(container=container, src=file))
        # Stage the whole file as a single chunk (start..end bytes) with
        # a 24-hour expiry.
        staged_file = StagedFile(
            csrf="staging_conversion_csrf",
            client_id=self._job_id,
            client_filename=file.name,
            file_id=new_uuid,
            timeout=timezone.now() + timedelta(hours=24),
            start_byte=0,
            end_byte=django_file.size - 1,
            total_size=django_file.size,
        )
        # Store under a random name, then persist the row so the
        # StagedAjaxFile lookup below can resolve it.
        staged_file.file.save(f"{uuid.uuid4()}", django_file)
        staged_file.save()
        staged_ajax_file = StagedAjaxFile(new_uuid)
        images.append(
            RawImageFile(
                upload_session=upload_session,
                filename=staged_ajax_file.name,
                staged_file_id=staged_ajax_file.uuid,
            )
        )
    # skip_processing: saving must not auto-start the build; processing
    # is triggered explicitly after all RawImageFiles are bulk-created.
    upload_session.save(skip_processing=True)
    RawImageFile.objects.bulk_create(images)
    upload_session.process_images()
def _copy_output_files(self, *, container, base_dir: Path):
    """Turn the container's output under *base_dir* into a prediction
    ``AnnotationSet`` for this submission-to-annotation-set job.

    If the submission was an archive (``__was_unzipped``), every output
    file is staged and imported as raw images via an upload session;
    otherwise exactly one CSV output is expected and parsed into the
    annotation set's labels. Raises ``ValueError`` when the output
    directory is empty.
    """
    # `find` lists all regular files below base_dir inside the container.
    output_files = [
        base_dir / Path(f)
        for f in container.exec_run(f"find {base_dir} -type f")
        .output.decode()
        .splitlines()
    ]
    if not output_files:
        raise ValueError("Output directory is empty")
    # TODO: This thing should not interact with the database
    job = SubmissionToAnnotationSetJob.objects.get(pk=self._job_id)
    annotationset = AnnotationSet.objects.create(
        creator=job.submission.creator,
        base=job.base,
        submission=job.submission,
        kind=AnnotationSet.PREDICTION,
    )
    if self.__was_unzipped:
        # Create the upload session but do not save it until we have the
        # files
        upload_session = RawImageUploadSession(annotationset=annotationset)
        images = []
        for file in output_files:
            new_uuid = uuid.uuid4()
            django_file = File(get_file(container=container, src=file))
            # Stage the whole file as a single chunk (start..end bytes)
            # with a 24-hour expiry.
            staged_file = StagedFile(
                csrf="staging_conversion_csrf",
                client_id=self._job_id,
                client_filename=file.name,
                file_id=new_uuid,
                timeout=timezone.now() + timedelta(hours=24),
                start_byte=0,
                end_byte=django_file.size - 1,
                total_size=django_file.size,
            )
            # Store under a random name, then persist the row so the
            # StagedAjaxFile lookup below can resolve it.
            staged_file.file.save(f"{uuid.uuid4()}", django_file)
            staged_file.save()
            staged_ajax_file = StagedAjaxFile(new_uuid)
            images.append(
                RawImageFile(
                    upload_session=upload_session,
                    filename=staged_ajax_file.name,
                    staged_file_id=staged_ajax_file.uuid,
                )
            )
        # skip_processing: saving must not auto-start the build;
        # processing is triggered explicitly after bulk_create.
        upload_session.save(skip_processing=True)
        RawImageFile.objects.bulk_create(images)
        upload_session.process_images()
    else:
        # Non-archive submission: a single CSV of labels is expected.
        assert len(output_files) == 1
        f = get_file(container=container, src=output_files[0])
        annotationset.labels = process_csv_file(f)
        annotationset.save()