def _copy_output_files(self, *, container, base_dir: Path):
    found_files = container.exec_run(f"find {base_dir} -type f")

    if found_files.exit_code != 0:
        logger.warning(f"Error listing {base_dir}")
        return

    output_files = [
        base_dir / Path(f)
        for f in found_files.output.decode().splitlines()
    ]

    if not output_files:
        logger.warning("Output directory is empty")
        return

    # TODO: This thing should not interact with the database
    result = Result.objects.create(job_id=self._job_id)

    # Create the upload session but do not save it until we have the
    # files
    upload_session = RawImageUploadSession(algorithm_result=result)
    images = []

    for file in output_files:
        new_uuid = uuid.uuid4()

        django_file = File(get_file(container=container, src=file))
        staged_file = StagedFile(
            csrf="staging_conversion_csrf",
            client_id=self._job_id,
            client_filename=file.name,
            file_id=new_uuid,
            timeout=timezone.now() + timedelta(hours=24),
            start_byte=0,
            end_byte=django_file.size - 1,
            total_size=django_file.size,
        )
        staged_file.file.save(f"{uuid.uuid4()}", django_file)
        staged_file.save()

        staged_ajax_file = StagedAjaxFile(new_uuid)

        images.append(
            RawImageFile(
                upload_session=upload_session,
                filename=staged_ajax_file.name,
                staged_file_id=staged_ajax_file.uuid,
            )
        )

    upload_session.save(skip_processing=True)
    RawImageFile.objects.bulk_create(images)
    upload_session.process_images()
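# The `get_file` helper used above is not shown here. A minimal sketch of what
# it could look like, assuming docker-py's `Container.get_archive`, which
# returns the requested path wrapped in a tar stream; the real helper in the
# codebase may differ.
import io
import tarfile
from pathlib import Path


def get_file(*, container, src: Path):
    """Return the file at ``src`` inside ``container`` as a seekable buffer."""
    tar_stream, _stat = container.get_archive(str(src))
    buffer = io.BytesIO(b"".join(tar_stream))
    with tarfile.open(fileobj=buffer, mode="r") as tar:
        # An archive fetched for a single path contains one member named
        # after that file
        member = tar.getmember(src.name)
        return io.BytesIO(tar.extractfile(member).read())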
def create_upload(self, image_files):
    # Relies on `self.request.user`, so this is expected to live as a method
    # on a view or serializer that exposes the current request.
    upload_session = RawImageUploadSession.objects.create(
        creator=self.request.user
    )

    raw_files = [
        RawImageFile(
            upload_session=upload_session,
            filename=image_file.name,
            staged_file_id=image_file.uuid,
        )
        for image_file in image_files
    ]
    RawImageFile.objects.bulk_create(raw_files)

    return upload_session.pk
def save(self, commit=True):
    instance = super().save(commit=False)

    # Create links between the created session and all uploaded files
    uploaded_files = self.cleaned_data["files"]  # type: List[StagedAjaxFile]

    raw_files = [
        RawImageFile(
            upload_session=instance,
            filename=uploaded_file.name,
            staged_file_id=uploaded_file.uuid,
        )
        for uploaded_file in uploaded_files
    ]

    if commit:
        with transaction.atomic():
            instance.save()
            RawImageFile.objects.bulk_create(raw_files)

    return instance
def save(self, commit=True):
    instance = super().save(commit=False)  # type: RawImageUploadSession

    # Create links between the created session and all uploaded files
    uploaded_files = self.cleaned_data["files"]  # type: List[StagedAjaxFile]

    raw_files = [
        RawImageFile(
            upload_session=instance,
            filename=uploaded_file.name,
            staged_file_id=uploaded_file.uuid,
        )
        for uploaded_file in uploaded_files
    ]

    if commit:
        instance.save(skip_processing=True)
        RawImageFile.objects.bulk_create(raw_files)
        instance.process_images()

    return instance
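# `skip_processing` is not a stock `Model.save` argument; the pattern above
# (save with skip_processing=True, bulk_create the files, then call
# process_images) only works if the model itself would otherwise trigger
# processing on save. A hedged sketch of that override, with the processing
# hook left abstract; the field layout and hook body are assumptions, not the
# project's actual model.
from django.db import models


class RawImageUploadSession(models.Model):
    ...

    def save(self, *args, skip_processing=False, **kwargs):
        adding = self._state.adding
        super().save(*args, **kwargs)
        # Newly created sessions normally start processing right away; callers
        # that still have RawImageFile rows to attach pass skip_processing=True
        # and call process_images() themselves once the files exist.
        if adding and not skip_processing:
            self.process_images()

    def process_images(self):
        # Project-specific: typically hands the linked RawImageFiles off to an
        # asynchronous importer task.
        ...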
def _copy_output_files(self, *, container, base_dir: Path):
    output_files = [
        base_dir / Path(f)
        for f in container.exec_run(f"find {base_dir} -type f")
        .output.decode()
        .splitlines()
    ]

    if not output_files:
        raise ValueError("Output directory is empty")

    # TODO: This thing should not interact with the database
    job = SubmissionToAnnotationSetJob.objects.get(pk=self._job_id)
    annotationset = AnnotationSet.objects.create(
        creator=job.submission.creator,
        base=job.base,
        submission=job.submission,
        kind=AnnotationSet.PREDICTION,
    )

    if self.__was_unzipped:
        # Create the upload session but do not save it until we have the
        # files
        upload_session = RawImageUploadSession(annotationset=annotationset)
        images = []

        for file in output_files:
            new_uuid = uuid.uuid4()

            django_file = File(get_file(container=container, src=file))
            staged_file = StagedFile(
                csrf="staging_conversion_csrf",
                client_id=self._job_id,
                client_filename=file.name,
                file_id=new_uuid,
                timeout=timezone.now() + timedelta(hours=24),
                start_byte=0,
                end_byte=django_file.size - 1,
                total_size=django_file.size,
            )
            staged_file.file.save(f"{uuid.uuid4()}", django_file)
            staged_file.save()

            staged_ajax_file = StagedAjaxFile(new_uuid)

            images.append(
                RawImageFile(
                    upload_session=upload_session,
                    filename=staged_ajax_file.name,
                    staged_file_id=staged_ajax_file.uuid,
                )
            )

        upload_session.save(skip_processing=True)
        RawImageFile.objects.bulk_create(images)
        upload_session.process_images()
    else:
        assert len(output_files) == 1
        f = get_file(container=container, src=output_files[0])
        annotationset.labels = process_csv_file(f)
        annotationset.save()
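# `process_csv_file` is referenced above but not defined here. A minimal
# sketch of what it might do, assuming the container writes a CSV of predicted
# labels and that the parsed rows are stored on `annotationset.labels`; the
# exact column handling is an assumption.
import csv
import io


def process_csv_file(file_obj):
    """Parse a binary CSV file-like object into a list of row dicts."""
    text = io.TextIOWrapper(file_obj, encoding="utf-8")
    reader = csv.DictReader(text)
    return [dict(row) for row in reader]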