def __init__(self, auth: Tuple[str, str, str], uf: UploadedFile,
             root: Optional[UploadedFile] = None,
             existing_mapping: Optional[Dict[str, UploadedFile]] = None,
             projects: Optional[ProjectCollection] = None,
             user_properties: Optional[Iterator[Tuple[str, str]]] = None):
    """
    Listener keeping a mapping between filesystem paths and Cytomine
    ``UploadedFile`` resources during an import.

    Parameters
    ----------
    auth : Tuple[str, str, str]
        Cytomine authentication triple (host, public key, private key) —
        stored as-is for later connections.
    uf : UploadedFile
        The UploadedFile associated to the listener. The UploadedFile can
        be new, i.e. not yet saved (it is saved here in that case).
    root : UploadedFile (optional)
        If set, it is supposed to already exist, i.e not new.
    existing_mapping : Dict[str, UploadedFile] (optional)
        Pre-built path -> UploadedFile mapping; when given, the ancestor
        chain is NOT walked and the mapping is copied instead.
    projects : ProjectCollection (optional)
        Projects to which imported images will be associated.
    user_properties : Iterator[Tuple[str, str]] (optional)
        Key/value properties to attach to imported images.
    """
    self.auth = auth
    self.path_uf_mapping = dict()

    # Persist the initial UploadedFile if the caller handed us an unsaved one.
    if uf.is_new():
        uf.save()
    self.path_uf_mapping[uf.path] = uf
    self.initial_uf = uf

    if existing_mapping is not None:
        # Trust the caller-provided mapping (shallow copy to avoid aliasing).
        # NOTE(review): in this branch self.root_path is never set — confirm
        # callers using existing_mapping do not rely on root_path.
        self.path_uf_mapping.update(copy(existing_mapping))
    else:
        # Resolve the root of the UploadedFile hierarchy, fetching the
        # parent from Cytomine when only its id is known.
        if root is None:
            if uf.parent is not None:
                root = UploadedFile().fetch(uf.parent)
            else:
                root = uf
        # Walk up the ancestor chain, registering every ancestor; after the
        # loop, root_path holds the path of the topmost ancestor.
        while root is not None:
            self.path_uf_mapping[root.path] = root
            self.root_path = root.path
            if root.parent is None:
                root = None
            else:
                root = UploadedFile().fetch(root.parent)

    self.abstract_images = []
    self.projects = projects
    self.user_properties = user_properties
    self.images = []
def make_hdf5():
    """
    Launch (in a background daemon thread) the conversion of an abstract
    image's tiles into an HDF5 companion file, and return immediately.

    Reads ``uploadedFile``, ``image`` and ``companionFile`` ids from the
    request parameters, fetches the corresponding Cytomine resources, then
    hands everything to ``create_hdf5`` on a daemon thread.
    """
    # Request parameters identifying the resources to convert.
    uf_id = _get_parameter()('uploadedFile')
    img_id = _get_parameter()('image')
    cf_id = _get_parameter()('companionFile')

    # Ensure a connection to the Cytomine core before fetching resources.
    get_core_connection()

    uf = UploadedFile().fetch(uf_id)
    abstract_image = AbstractImage().fetch(img_id)
    slice_collection = AbstractSliceCollection().fetch_with_filter(
        "abstractimage", abstract_image.id)
    companion = CompanionFile().fetch(cf_id)

    # Conversion tuning knobs from the app configuration.
    app_conf = current_app.config
    worker_count = app_conf['N_TILE_READER_WORKERS']
    tile_dim = app_conf['TILE_SIZE']
    progress_batch = app_conf['N_WRITTEN_TILES_TO_UPDATE_PROGRESS']
    root_dir = app_conf['ROOT']

    # Daemon thread: the conversion must not block process shutdown.
    worker = Thread(
        target=create_hdf5,
        args=(uf, abstract_image, slice_collection, companion,
              worker_count, tile_dim, progress_batch, root_dir),
        daemon=True)
    worker.start()

    return {'started': True}
async def legacy_import(request: Request, background: BackgroundTasks,
                        core: Optional[str] = None,
                        cytomine: Optional[str] = None,
                        storage: Optional[int] = None,
                        id_storage: Optional[int] = Query(None, alias='idStorage'),
                        projects: Optional[str] = None,
                        id_project: Optional[str] = Query(None, alias='idProject'),
                        sync: Optional[bool] = False,
                        keys: Optional[str] = None,
                        values: Optional[str] = None,
                        upload_name: str = Form(..., alias="files[].name"),
                        upload_path: str = Form(..., alias="files[].path"),
                        upload_size: int = Form(..., alias="files[].size"),
                        config: Settings = Depends(get_settings)):
    """
    Import a file (legacy)

    Legacy upload endpoint: authenticates the uploader against the Cytomine
    core, registers a root ``UploadedFile``, and either runs the import
    synchronously (``sync=True``, responding with the imported images) or
    dispatches it as a background task. Several parameters have two aliases
    for backward compatibility (core/cytomine, storage/idStorage,
    projects/idProject).
    """
    # Legacy alias resolution: 'cytomine' wins over 'core' when both given.
    core = cytomine if cytomine is not None else core
    if not core:
        raise BadRequestException(detail="core or cytomine parameter missing.")

    # Legacy alias resolution: 'idStorage' wins over 'storage'.
    id_storage = id_storage if id_storage is not None else storage
    if not id_storage:
        raise BadRequestException(
            detail="idStorage or storage parameter missing.")

    # Projects may arrive as a comma-separated id list under either alias.
    projects_to_parse = id_project if id_project is not None else projects
    try:
        id_projects = []
        if projects_to_parse:
            projects = ensure_list(projects_to_parse.split(","))
            id_projects = [int(p) for p in projects]
    except ValueError:
        raise BadRequestException(
            detail="Invalid projects or idProject parameter.")

    # Verify the request signature against the user's Cytomine private key.
    public_key, signature = parse_authorization_header(request.headers)
    cytomine_auth = (core, config.cytomine_public_key,
                     config.cytomine_private_key)
    with Cytomine(*cytomine_auth, configure_logging=False) as c:
        if not c.current_user:
            raise AuthenticationException(
                "PIMS authentication to Cytomine failed.")
        this = get_this_image_server(config.pims_url)
        cyto_keys = c.get(f"userkey/{public_key}/keys.json")
        private_key = cyto_keys["privateKey"]

        if sign_token(private_key, parse_request_token(request)) != signature:
            raise AuthenticationException("Authentication to Cytomine failed")

        # Re-authenticate as the uploading user for subsequent core calls.
        c.set_credentials(public_key, private_key)
        user = c.current_user

        storage = Storage().fetch(id_storage)
        if not storage:
            raise CytomineProblem(f"Storage {id_storage} not found")

        # Resolve every target project up front so a bad id fails fast.
        projects = ProjectCollection()
        for pid in id_projects:
            project = Project().fetch(pid)
            if not project:
                raise CytomineProblem(f"Project {pid} not found")
            projects.append(project)

        # keys/values are parallel comma-separated lists of user properties.
        keys = keys.split(',') if keys is not None else []
        values = values.split(',') if values is not None else []
        if len(keys) != len(values):
            raise CytomineProblem(
                f"Keys {keys} and values {values} have varying size.")
        user_properties = zip(keys, values)

        upload_name = sanitize_filename(upload_name)
        root = UploadedFile(upload_name, upload_path, upload_size, "", "",
                            id_projects, id_storage, user.id, this.id,
                            UploadedFile.UPLOADED)

        cytomine = CytomineListener(cytomine_auth, root, projects=projects,
                                    user_properties=user_properties)

        if sync:
            try:
                run_import(upload_path, upload_name,
                           extra_listeners=[cytomine], prefer_copy=False)
                root = cytomine.initial_uf.fetch()
                images = cytomine.images
                return [{
                    "status": 200,
                    "name": upload_name,
                    "uploadedFile": serialize_cytomine_model(root),
                    "images": [{
                        "image": serialize_cytomine_model(image[0]),
                        "imageInstances": serialize_cytomine_model(image[1])
                    } for image in images]
                }]
            except Exception as e:
                traceback.print_exc()
                # Legacy contract: body advertises status 500 while the HTTP
                # status code is 400 — kept as-is for client compatibility.
                return JSONResponse(content=[{
                    "status": 500,
                    "error": str(e),
                    "files": [{
                        "name": upload_name,
                        "size": 0,
                        "error": str(e)
                    }]
                }], status_code=400)
        else:
            # Asynchronous path: enqueue the import and reply immediately
            # with an empty image list.
            send_task(
                Task.IMPORT_WITH_CYTOMINE,
                args=[cytomine_auth, upload_path, upload_name, cytomine, False],
                starlette_background=background)

            return JSONResponse(content=[{
                "status": 200,
                "name": upload_name,
                "uploadedFile": serialize_cytomine_model(root),
                "images": []
            }], status_code=200)
def start_conversion(self, path: Path, parent_path: Path, *args, **kwargs):
    """
    Register the start of a conversion: create and save a new CONVERTING
    ``UploadedFile`` for `path` (inheriting storage/user/server from the
    parent at `parent_path`), record it in the path mapping, and flip the
    parent's status to CONVERTING as well.
    """
    parent_uf = self.get_uf(parent_path)

    converted = UploadedFile()
    converted.status = UploadedFile.CONVERTING
    converted.originalFilename = path.name
    converted.filename = str(path.relative_to(FILE_ROOT_PATH))
    # Size/extension/content type are unknown until conversion finishes.
    converted.size = 0
    converted.ext = ""
    converted.contentType = ""
    # Inherit ownership and placement from the parent UploadedFile.
    converted.storage = parent_uf.storage
    converted.user = parent_uf.user
    converted.parent = parent_uf.id
    converted.imageServer = parent_uf.imageServer
    converted.save()

    self.path_uf_mapping[str(path)] = converted

    parent_uf.status = UploadedFile.CONVERTING
    parent_uf.update()
def register_file(self, path: Path, parent_path: Path, *args, **kwargs):
    """
    Create and save an UPLOADED ``UploadedFile`` for `path`, inheriting
    storage/user/server from the parent at `parent_path`, and record it in
    the path mapping.
    """
    parent_uf = self.get_uf(parent_path)

    registered = UploadedFile()
    registered.status = UploadedFile.UPLOADED
    registered.contentType = ""
    registered.size = path.size
    registered.filename = str(path.relative_to(FILE_ROOT_PATH))
    registered.originalFilename = str(path.name)
    registered.ext = ""
    # Inherit ownership and placement from the parent UploadedFile.
    registered.storage = parent_uf.storage
    registered.user = parent_uf.user
    registered.parent = parent_uf.id
    registered.imageServer = parent_uf.imageServer
    registered.save()

    self.path_uf_mapping[str(path)] = registered
def end_unpacking(self, path: Path, unpacked_path: Path, *args,
                  format: AbstractFormat = None, is_collection: bool = False,
                  **kwargs):
    """
    Mark the archive at `path` as UNPACKED. For a single-image archive
    (``is_collection=False``), also create and save an ``UploadedFile`` for
    the unpacked content and record it in the path mapping; collections are
    handled per-member elsewhere.
    """
    parent_uf = self.get_uf(path)
    parent_uf.status = UploadedFile.UNPACKED
    parent_uf.update()

    # Collections get one UploadedFile per member, not one for the archive
    # content as a whole.
    if is_collection:
        return

    unpacked = UploadedFile()
    unpacked.status = UploadedFile.UPLOADED  # Better status ?
    unpacked.contentType = format.get_identifier()  # TODO
    unpacked.size = unpacked_path.size
    unpacked.filename = str(unpacked_path.relative_to(FILE_ROOT_PATH))
    unpacked.originalFilename = str(format.main_path.name)
    unpacked.ext = ""
    # Inherit ownership and placement from the archive's UploadedFile.
    unpacked.storage = parent_uf.storage
    unpacked.user = parent_uf.user
    unpacked.parent = parent_uf.id
    unpacked.imageServer = parent_uf.imageServer
    unpacked.save()

    self.path_uf_mapping[str(unpacked_path)] = unpacked
def _find_uf_by_id(self, id: int) -> UploadedFile:
    """
    Return the ``UploadedFile`` with the given id, preferring the local
    path mapping cache and falling back to a Cytomine fetch.

    Bug fix: the original used ``next(gen, UploadedFile().fetch(id))``.
    Since ``next``'s default argument is evaluated eagerly, the remote
    fetch ran on EVERY call — even on a cache hit — wasting a round trip
    and failing cache hits whenever the fetch failed. The fetch is now
    performed only when the id is absent from the cache.
    """
    for uf in self.path_uf_mapping.values():
        if uf.id == id:
            return uf
    # Not cached locally: fetch from the Cytomine core.
    return UploadedFile().fetch(id)