Example #1
    def from_json_value(cls, value: JsonValue) -> "HttpFs":
        value_obj = ensureJsonObject(value)

        raw_headers = value_obj.get("headers")
        if raw_headers is None:
            headers = {}
        else:
            headers_obj = ensureJsonObject(raw_headers)
            headers = {
                ensureJsonString(k): ensureJsonString(v)
                for k, v in headers_obj.items()
            }

        read_url = Url.parse(ensureJsonString(value_obj.get("read_url")))
        if read_url is None:
            raise ValueError(
                f"Bad 'read_url' in json payload: {json.dumps(value, indent=4)}"
            )

        raw_write_url = value_obj.get("write_url")
        if raw_write_url is None:
            write_url = None
        else:
            write_url = Url.parse(ensureJsonString(raw_write_url))
            if write_url is None:
                raise ValueError(
                    f"Bad write_url in HttpFs payload: {json.dumps(value, indent=4)}"
                )

        return cls(
            read_url=read_url,
            write_url=write_url,
            headers=headers,
        )
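A minimal usage sketch for the parser above, assuming a plain dict is an acceptable JsonValue; the payload values are hypothetical, and "write_url" is simply omitted to exercise the optional branch:

payload = {
    "read_url": "http://localhost:8123/data",
    "headers": {"Authorization": "Bearer some-token"},
}
fs = HttpFs.from_json_value(payload)  # write_url defaults to None, headers round-trip as str -> str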
Example #2
def test_httpfs(tmp_path):
    httpd = start_test_server(tmp_path, port=8123)
    try:
        fs = HttpFs(read_url=Url.parse("http://localhost:8123/"))

        eprint("  -->  Opening some file...")
        with fs.openbin("/dir1/file1.txt", "r") as f:
            assert f.read() == "file1_contents".encode("ascii")

        eprint("  --> Verifying that opendir works nested dirs...")
        dir2 = fs.opendir("dir2")
        assert dir2.openbin("file2.txt", "r").read() == "file2_contents".encode("ascii")

        fs2 = HttpFs(read_url=Url.parse("http://localhost:8123/dir1"))
        assert fs2.desc("file2.txt") == "http://localhost:8123/dir1/file2.txt"

        # check that "/" maps to the base url that was used to create the filesystem
        assert fs2.desc("/file2.txt") == "http://localhost:8123/dir1/file2.txt"

        # check that ".." works even when creating the fs
        fs_updir = HttpFs(read_url=Url.parse("http://localhost:8123/dir1/.."))
        with fs_updir.openbin("dir1/file1.txt", "r") as f:
            assert f.read() == "file1_contents".encode("ascii")

    finally:
        httpd.shutdown()
Example #3
    async def forward_chunk_request(self,
                                    request: web.Request) -> web.Response:
        """Redirects a precomp chunk request to the original URL"""
        encoded_original_url = request.match_info.get("encoded_original_url")
        if not encoded_original_url:
            return web.Response(status=400, text="Missing parameter: url")
        decoded_url = b64decode(encoded_original_url,
                                altchars=b'-_').decode('utf8')
        url = Url.parse(decoded_url)
        if url is None:
            return web.Response(status=400, text=f"Bad url: {decoded_url}")
        rest = request.match_info.get("rest", "").lstrip("/")
        tile_url = url.joinpath(rest)

        if tile_url.hostname != "data-proxy.ebrains.eu":
            raise web.HTTPFound(location=tile_url.schemeless_raw)

        token = UserToken.get_global_login_token()
        if isinstance(token, UsageError):
            return web.Response(status=403, text="Token has expired")  # FIXME

        async with self.http_client_session.get(
                tile_url.schemeless_raw,
                ssl=self.ssl_context,
                headers=token.as_auth_header(),
        ) as response:
            cscs_url = (await response.json())["url"]
            raise web.HTTPFound(location=cscs_url)
Example #4
 def __init__(
     self,
     *,
     access_token: str,
     refresh_token: Optional[str] = None,
     # expires_in: int,
     # refresh_expires_in: int,
     # token_type: str,
     # id_token: str,
     # not_before_policy: int,
     # session_state: str,
     # scope: str
 ):
     api_url = Url.parse("https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect")
     assert api_url is not None
     self._api_url = api_url
     self.access_token = access_token
     self.refresh_token = refresh_token
     # self.expires_in = expires_in
     # self.refresh_expires_in = refresh_expires_in
     # self.token_type = token_type
     # self.id_token = id_token
     # self.not_before_policy = not_before_policy
     # self.session_state = session_state
     # self.scope = scope
     super().__init__()
Example #5
def test_url_basics():
    raw = "precomputed://http://some.host.com/some/path?a=123&b=456#myhash"
    url = Url.parse(raw)
    assert url.datascheme == DataScheme.PRECOMPUTED
    assert url.protocol == Protocol.HTTP
    assert url.port is None
    assert url.path == PurePosixPath("/some/path")
    assert url.search == {"a": '123', "b": '456'}
    assert url.double_protocol_raw == raw

    url2 = url.updated_with(extra_search={"c": "456", "d": "789"})
    assert url2.search == {"a": '123', "b": '456', "c": "456", "d": "789"}
    assert url2.raw == "precomputed+http://some.host.com/some/path?a=123&b=456&c=456&d=789#myhash"

    url3 = Url.parse(
        "http://some.host.com/some/path?a=123&b=%5B1%2C+2%2C+3%5D#myhash")
    assert url3.search["b"] == '[1, 2, 3]'
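Note that Url.parse is used throughout these examples as returning None for strings it cannot parse, rather than raising. A small sketch of that contract, assuming a string without a protocol is rejected (the inputs are hypothetical):

assert Url.parse("some/relative/path") is None
url = Url.parse("https://example.com/data")
assert url is not None  # callers must narrow the Optional before using the Url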
Example #6
 def from_json_value(cls, value: JsonValue) -> "JsonableFilesystem":
     value_obj = ensureJsonObject(value)
     raw_write_url = value_obj.get("write_url")
     raw_headers = value_obj.get("headers")
     if raw_headers is None:
         headers = {}
     else:
         headers_obj = ensureJsonObject(raw_headers)
         headers = {
             ensureJsonString(k): ensureJsonString(v)
             for k, v in headers_obj.items()
         }
     return cls(
         read_url=Url.parse(ensureJsonString(value_obj.get("read_url"))),
         write_url=None if raw_write_url is None else Url.parse(
             ensureJsonString(raw_write_url)),
         headers=headers,
     )
Example #7
 async def forward_chunk_request(self,
                                 request: web.Request) -> web.Response:
     """Redirects a precomp chunk request to the original URL"""
     encoded_original_url = request.match_info.get("encoded_original_url")
     if not encoded_original_url:
         return web.Response(status=400, text="Missing parameter: url")
     info_url = Url.parse(
         b64decode(encoded_original_url, altchars=b'-_').decode('utf8'))
     if info_url is None:  # Url.parse returns None on unparseable input
         return web.Response(status=400, text="Bad url in request path")
     rest = request.match_info.get("rest", "").lstrip("/")
     raise web.HTTPFound(location=info_url.joinpath(rest).schemeless_raw)
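The encoded_original_url segment is URL-safe base64 (note altchars=b'-_'). A sketch of how a client would produce it, mirroring the b64encode call in Example #22; the original URL here is hypothetical:

from base64 import b64encode

original_url = "https://example.com/some/precomputed/volume"
encoded = b64encode(original_url.encode("utf8"), altchars=b'-_').decode("utf8")
# 'encoded' is what ends up in the encoded_original_url match group above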
Example #8
 def try_to_datasource(
     self,
     *,
     ilp_fs: JsonableFilesystem,
     ilp_path: PurePosixPath,
     allowed_protocols: Sequence[Protocol] = (Protocol.HTTP, Protocol.HTTPS)
 ) -> "FsDataSource | Exception":
     url = Url.parse(self.filePath)
      if url is None: # filePath was probably a path, not a URL
         path = ilp_path.parent.joinpath(self.filePath)
         url = Url.parse(ilp_fs.geturl(path.as_posix()))
     if url is None:
         return Exception(f"Could not parse {self.filePath} as URL")
     datasources_result = try_get_datasources_from_url(url=url, allowed_protocols=allowed_protocols)
     if isinstance(datasources_result, Exception):
         return Exception(f"Could not open {url} as a data source: {datasources_result}")
     if len(datasources_result) != 1:
         return Exception(f"Expected a single datasource from {url}, found {len(datasources_result)}")
     return datasources_result[0]
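A sketch of consuming this error-as-value return; dataset_info stands in for whatever object defines try_to_datasource, and the fs/path arguments are hypothetical:

result = dataset_info.try_to_datasource(
    ilp_fs=project_fs, ilp_path=PurePosixPath("MyProject.ilp"))
if isinstance(result, Exception):
    raise result  # or report it: errors are returned, not raised, by design
datasource = result  # type-narrowed to FsDataSource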
Example #9
    async def stripped_precomputed_info(self,
                                        request: web.Request) -> web.Response:
        """Serves a precomp info stripped of all but one scales"""
        resolution_x = request.match_info.get("resolution_x")
        resolution_y = request.match_info.get("resolution_y")
        resolution_z = request.match_info.get("resolution_z")
        if resolution_x is None or resolution_y is None or resolution_z is None:
            return web.Response(
                status=400,
                text=
                f"Bad resolution: {resolution_x}_{resolution_y}_{resolution_z}"
            )
        try:
            resolution = (int(resolution_x), int(resolution_y),
                          int(resolution_z))
        except Exception:
            return web.Response(
                status=400,
                text=
                f"Bad resolution: {resolution_x}_{resolution_y}_{resolution_z}"
            )

        encoded_original_url = request.match_info.get("encoded_original_url")
        if not encoded_original_url:
            return web.Response(status=400, text="Missing parameter: url")

        decoded_url = b64decode(encoded_original_url,
                                altchars=b'-_').decode('utf8')
        base_url = Url.parse(decoded_url)
        if base_url is None:
            return web.Response(status=400, text=f"Bad url: {decoded_url}")
        info_url = base_url.joinpath("info")
        logger.debug(f"Will request this info: {info_url.schemeless_raw}")

        token = UserToken.get_global_login_token()
        if isinstance(token, UsageError):
            return web.Response(status=403, text="Token has expired")  # FIXME

        async with self.http_client_session.get(
                info_url.schemeless_raw,
                ssl=self.ssl_context,
                headers=token.as_auth_header()
                if info_url.hostname == "data-proxy.ebrains.eu" else {},
                params={"redirect": "true"}
                if info_url.hostname == "data-proxy.ebrains.eu" else {},
        ) as response:
            response_text = await response.text()
            if response.status // 100 != 2:
                return web.Response(status=response.status, text=response_text)
            info = PrecomputedChunksInfo.from_json_value(
                json.loads(response_text))

        stripped_info = info.stripped(resolution=resolution)
        return web.json_response(stripped_info.to_json_value())
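The route feeding these match_info keys is not shown. A plausible aiohttp registration, inferred from the stripped_precomputed URL regex in Example #13; the exact path template is an assumption:

# inside the server's route-setup code:
app.add_routes([
    web.get(
        "/stripped_precomputed/url={encoded_original_url}/resolution={resolution_x}_{resolution_y}_{resolution_z}/info",
        self.stripped_precomputed_info,
    ),
])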
Example #10
    def _get_tmp_url(self, path: str) -> Url:
        object_url = self.url.concatpath(path).updated_with(
            search={
                **self.url.search, "redirect": "false"
            })
        response = self.session.get(object_url.raw)
        response.raise_for_status()

        response_obj = ensureJsonObject(response.json())
        cscs_url = Url.parse(ensureJsonString(response_obj.get("url")))
        assert cscs_url is not None
        return cscs_url
Example #11
    def from_url(
        cls,
        *,
        url: Union[Url, str],
        name: Optional[str] = None,
        allowed_protocols: Sequence[Protocol] = (Protocol.HTTP, Protocol.HTTPS)
    ) -> "UiDataSource":
        parsed_url = Url.parse(url) if isinstance(url, str) else url
        if parsed_url is None:
            raise ValueError(f"Could not parse url '{url}'")
        if parsed_url.protocol not in allowed_protocols:
            raise ValueError(
                f"Disallowed protocol '{parsed_url.protocol}' in url '{url}'")

        path = Path(parsed_url.path)
        if parsed_url.datascheme == DataScheme.PRECOMPUTED:
            # expect a resolution query param determining which scale to use: e.g.: ?precomputed_scale_resolution=10_20_30
            resolution_param_name = "precomputed_scale_resolution"
            raw_resolution = parsed_url.search.get(resolution_param_name)
            if raw_resolution is None:
                raise ValueError(
                    f"Missing '{resolution_param_name}' query parameter")
            resolution = tuple(int(axis) for axis in raw_resolution.split("_"))
            if len(resolution) != 3:
                raise ValueError(
                    f"Bad '{resolution_param_name}': {raw_resolution}")
            parsed_url = parsed_url.updated_with(
                search={
                    k: v
                    for k, v in parsed_url.search.items()
                    if k != resolution_param_name
                })
            fs = parsed_url.get_filesystem()
            datasource = PrecomputedChunksDataSource(
                path=path,
                filesystem=fs,
                resolution=(resolution[0], resolution[1], resolution[2]))
        elif re.search(r'\.(jpe?g|png|bmp)$', parsed_url.path.name,
                       re.IGNORECASE):
            datasource = SkimageDataSource(
                path=path, filesystem=parsed_url.get_filesystem())
        elif re.search(r'\.n5\b', parsed_url.path.as_posix(), re.IGNORECASE):
            datasource = N5DataSource(path=path,
                                      filesystem=parsed_url.get_filesystem())
        else:
            raise ValueError(f"Could not open url {url}")

        return UiDataSource(
            datasource=datasource,
            url=parsed_url,
            name=str(uuid.uuid4())
            if name is None else name,  # FIXME: handle name better
        )
Example #12
 def __init__(
     self,
     *,
     access_token: str,
     refresh_token: Optional[str] = None,
     # expires_in: int,
     # refresh_expires_in: int,
     # token_type: str,
     # id_token: str,
     # not_before_policy: int,
     # session_state: str,
     # scope: str
 ):
      api_url = Url.parse(
          "https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect")
      assert api_url is not None  # constant URL, so parsing cannot fail
      self._api_url = api_url
     self.access_token = access_token
     self.refresh_token = refresh_token
Example #13
    async def get_datasources_from_url(self,
                                       request: web.Request) -> web.Response:
        payload = await request.json()
        raw_url = payload.get("url")
        if raw_url is None:
            return web.json_response({"error", "Missing 'url' key in payload"},
                                     status=400)
        url = Url.parse(raw_url)
        if url is None:
            return web.json_response({"error", "Bad url in payload"},
                                     status=400)

        selected_resolution: "Tuple[int, int, int] | None" = None
        stripped_precomputed_url_regex = re.compile(
            r"/stripped_precomputed/url=(?P<url>[^/]+)/resolution=(?P<resolution>\d+_\d+_\d+)"
        )
        match = stripped_precomputed_url_regex.search(url.path.as_posix())
        if match:
            url = Url.from_base64(match.group("url"))
            selected_resolution = tuple(
                int(axis) for axis in match.group("resolution").split("_"))

        datasources_result = try_get_datasources_from_url(
            url=url, allowed_protocols=(Protocol.HTTP, Protocol.HTTPS))
        if isinstance(datasources_result, Exception):
            return web.json_response({"error": str(datasources_result)},
                                     status=400)

        if selected_resolution:
            datasources = [
                ds for ds in datasources_result
                if ds.spatial_resolution == selected_resolution
            ]
            if len(datasources) != 1:
                return web.json_response({
                    "error":
                    f"Expected single datasource, found these: {json.dumps([ds.to_json_value() for ds in datasources], indent=4)}"
                }, status=400)
        else:
            datasources = datasources_result

        return web.json_response(
            {"datasources": tuple([ds.to_json_value() for ds in datasources])})
Example #14
    async def stripped_precomputed_info(self,
                                        request: web.Request) -> web.Response:
        """Serves a precomp info stripped of all but one scales"""
        resolution_x = request.match_info.get("resolution_x")
        resolution_y = request.match_info.get("resolution_y")
        resolution_z = request.match_info.get("resolution_z")
        if resolution_x is None or resolution_y is None or resolution_z is None:
            return web.Response(
                status=400,
                text=
                f"Bad resolution: {resolution_x}_{resolution_y}_{resolution_z}"
            )
        try:
            resolution = (int(resolution_x), int(resolution_y),
                          int(resolution_z))
        except Exception:
            return web.Response(
                status=400,
                text=
                f"Bad resolution: {resolution_x}_{resolution_y}_{resolution_z}"
            )

        encoded_original_url = request.match_info.get("encoded_original_url")
        if not encoded_original_url:
            return web.Response(status=400, text="Missing parameter: url")

        decoded_url = b64decode(encoded_original_url,
                                altchars=b'-_').decode('utf8')
        base_url = Url.parse(decoded_url)
        if base_url is None:
            return web.Response(status=400, text=f"Bad url: {decoded_url}")
        info_url = base_url.joinpath("info")
        print(f"+++++ Will request this info: {info_url.schemeless_raw}",
              file=sys.stderr)
        async with aiohttp.ClientSession() as session:
            async with session.get(info_url.schemeless_raw,
                                   ssl=self.ssl_context) as response:
                response_text = await response.text()
                if response.status // 100 != 2:
                    return web.Response(status=response.status,
                                        text=response_text)
                info = PrecomputedChunksInfo.from_json_value(
                    json.loads(response_text))

        stripped_info = info.stripped(resolution=resolution)
        return web.json_response(stripped_info.to_json_value())
Example #15
 def __init__(self,
              *,
              outer_path: PurePosixPath,
              inner_path: PurePosixPath,
              location: Point5D = Point5D.zero(),
              filesystem: JsonableFilesystem,
              spatial_resolution: Optional[Tuple[int, int, int]] = None):
     self.outer_path = outer_path
     self.inner_path = inner_path
     self.filesystem = filesystem
     binfile = filesystem.openbin(outer_path.as_posix())
     # FIXME: h5py might not like this if the filesystem isn't OSFS
     f = h5py.File(binfile, "r")  #type: ignore
     try:
         dataset = f[inner_path.as_posix()]
         if not isinstance(dataset, h5py.Dataset):
             raise ValueError(f"{inner_path} is not a Dataset")
         self.axiskeys = self.getAxisKeys(dataset)
         self._dataset = dataset
         tile_shape = Shape5D.create(raw_shape=self._dataset.chunks
                                     or self._dataset.shape,
                                     axiskeys=self.axiskeys)
         base_url = Url.parse(filesystem.geturl(outer_path.as_posix()))
         assert base_url is not None
         super().__init__(
             c_axiskeys_on_disk=self.axiskeys,
             tile_shape=tile_shape,
             interval=Shape5D.create(
                 raw_shape=self._dataset.shape,
                 axiskeys=self.axiskeys).to_interval5d(location),
             dtype=self._dataset.dtype,
             spatial_resolution=spatial_resolution or (1, 1, 1),  # FIXME
             filesystem=filesystem,
             path=self.outer_path)
     except Exception as e:
         f.close()
         raise e
Example #16
async def main():
    ilastik_root_url = Url.parse("https://app.ilastik.org/")
    assert ilastik_root_url is not None
    data_url = Url.parse(
        "precomputed://https://app.ilastik.org/public/images/c_cells_2.precomputed"
    )
    assert data_url is not None
    datasources = try_get_datasources_from_url(url=data_url)
    if isinstance(datasources, Exception):
        raise datasources
    assert not isinstance(datasources, Exception)
    ds = datasources[0]
    token = UserToken.from_environment()
    assert isinstance(token, UserToken)

    async with aiohttp.ClientSession(
            cookies={EbrainsLogin.AUTH_COOKIE_KEY: token.access_token
                     }) as session:
        print(f"Creating new session--------------")
        async with session.post(ilastik_root_url.concatpath("api/session").raw,
                                json={"session_duration_minutes":
                                      15}) as response:
            response.raise_for_status()
            session_data: Dict[str, Any] = await response.json()
            session_id = session_data["id"]
        print(
            f"Done creating session: {json.dumps(session_data)} <<<<<<<<<<<<<<<<<<"
        )

        for _ in range(10):
            response = await session.get(
                ilastik_root_url.concatpath(f"api/session/{session_id}").raw)
            response.raise_for_status()
            session_status = await response.json()
            if session_status["status"] == "ready":
                session_url = session_status["url"]
                break
            print(f"Session {session_id} is notready yet")
            _ = await asyncio.sleep(2)
        else:
            raise RuntimeError("Given up waiting on session")

        # exit(1)

        async with session.ws_connect(f"{session_url}/ws") as ws:
            _ = asyncio.get_event_loop().create_task(read_server_status(ws))
            print("sending some feature extractors=======")
            await ws.send_json(
                RPCPayload(applet_name="feature_selection_applet",
                           method_name="add_feature_extractors",
                           arguments={
                               "feature_extractors":
                               tuple(fe.to_json_value()
                                     for fe in get_sample_feature_extractors())
                           }).to_json_value())
            print("done sending feature extractors<<<<<")

            print("sending some annotations=======")
            default_label_names = ["Foreground", "Background"]
            for label_name, label in zip(
                    default_label_names,
                    get_sample_c_cells_pixel_annotations(
                        override_datasource=ds)):
                for a in label.annotations:
                    await ws.send_json(
                        RPCPayload(applet_name="brushing_applet",
                                   method_name="add_annotation",
                                   arguments={
                                       "label_name": label_name,
                                       "annotation": a.to_json_data(),
                                   }).to_json_value())

            print("done sending annotations<<<<<")
            await asyncio.sleep(2)

            print("Enabling live update=======")
            await ws.send_json(
                RPCPayload(applet_name="pixel_classification_applet",
                           method_name="set_live_update",
                           arguments={
                               "live_update": True
                           }).to_json_value())
            await asyncio.sleep(2)

            # from base64 import b64encode
            # encoded_ds: str = b64encode(json.dumps(ds.to_json_value()).encode("utf8"), altchars=b'-_').decode("utf8")

            # response_tasks = {}
            # for tile in ds.roi.get_tiles(tile_shape=Shape5D(x=256, y=256, c=2), tiles_origin=Point5D.zero()):
            #     url = f"{session_url}/predictions/raw_data={encoded_ds}/generation={classifier_generation}/data/{tile.x[0]}-{tile.x[1]}_{tile.y[0]}-{tile.y[1]}_0-1"
            #     print(f"---> Requesting {url}")
            #     response_tasks[tile] = session.get(url)

            # for tile, resp in response_tasks.items():
            #     async with resp as response:
            #         print("Status:", response.status)
            #         print("Content-type:", response.headers['content-type'])

            #         if response.status // 100 != 2:
            #             raise Exception(f"Error: {(await response.content.read()).decode('utf8')}")

            #         tile_bytes = await response.content.read()
            #         print(f"Got predictions<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")

            #         raw_data = np.frombuffer(tile_bytes, dtype=np.uint8).reshape(2, tile.shape.y, tile.shape.x)
            #         Array5D(raw_data, axiskeys="cyx").show_channels()

            hbp_image_service_bucket_fs = BucketFs(
                bucket_name="hbp-image-service",
                ebrains_user_token=UserToken.get_global_token_or_raise(),
                prefix=PurePosixPath("/"),
            )

            predictions_export_datasink = create_precomputed_chunks_sink(
                shape=ds.shape.updated(c=2),
                dtype=np.dtype("float32"),
                chunk_size=ds.tile_shape.updated(c=2),
                fs=hbp_image_service_bucket_fs,
            )

            print(f"Sending predictions job request??????")
            await ws.send_json(
                RPCPayload(applet_name="export_applet",
                           method_name="start_export_job",
                           arguments={
                               "datasource":
                               ds.to_json_value(),
                               "datasink":
                               predictions_export_datasink.to_json_value(),
                           }).to_json_value())

            simple_segmentation_datasinks = [
                create_precomputed_chunks_sink(
                    shape=ds.shape.updated(c=3),
                    dtype=np.dtype("uint8"),
                    chunk_size=ds.tile_shape.updated(c=3),
                    fs=hbp_image_service_bucket_fs),
                create_precomputed_chunks_sink(
                    shape=ds.shape.updated(c=3),
                    dtype=np.dtype("uint8"),
                    chunk_size=ds.tile_shape.updated(c=3),
                    fs=hbp_image_service_bucket_fs),
            ]

            print(f"Sending simple segmentation job request??????")
            await ws.send_json(
                RPCPayload(applet_name="export_applet",
                           method_name="start_simple_segmentation_export_job",
                           arguments={
                               "datasource":
                               ds.to_json_value(),
                               "datasinks":
                               tuple(sink.to_json_value()
                                     for sink in simple_segmentation_datasinks),
                           }).to_json_value())

            print(f"---> Job successfully scheduled? Waiting for a while")
            await asyncio.sleep(15)
            print(f"Done waiting. Checking outputs")

            predictions_output = PrecomputedChunksDataSource(
                filesystem=hbp_image_service_bucket_fs,
                path=predictions_export_datasink.path,
                resolution=(1, 1, 1))
            for tile in predictions_output.roi.get_datasource_tiles():
                tile.retrieve().as_uint8(normalized=True)  #.show_channels()

            segmentation_output_1 = PrecomputedChunksDataSource(
                filesystem=hbp_image_service_bucket_fs,
                path=simple_segmentation_datasinks[1].path,
                resolution=(1, 1, 1))
            for tile in segmentation_output_1.roi.get_datasource_tiles():
                tile.retrieve()  #.show_images()

            close_url = f"{session_url}/close"
            print(f"Closing session py sending delete to {close_url}")
            r = await session.delete(close_url)
            r.raise_for_status()

        global finished
        finished = True
Example #17
 def __setstate__(self, data: Dict[str, Any]):
     self.__init__(
         read_url=Url.parse(data["read_url"]),
         write_url=Url.parse(data["write_url"]),
         headers=data["headers"],
     )
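The matching __getstate__ is not shown here; a plausible sketch, assuming Url.raw (seen in Example #5) is the full string form of a Url and the usual typing imports are in scope:

 def __getstate__(self) -> Dict[str, Any]:
     return {
         "read_url": self.read_url.raw,
         # note: if write_url can be None, the __setstate__ above would also
         # need a None check before calling Url.parse on it
         "write_url": None if self.write_url is None else self.write_url.raw,
         "headers": self.headers,
     }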
Example #18
 def url(self) -> Url:
     url = Url.parse(self.filesystem.geturl(self.path.as_posix()))
     assert url is not None
     return url
Example #19
def test_pixel_classification_workflow():
    executor = get_executor(hint="server_tile_handler")
    priority_executor = PriorityExecutor(executor=executor,
                                         max_active_job_steps=8)

    workflow = PixelClassificationWorkflow(
        on_async_change=lambda: print(
            json.dumps(workflow.export_applet._get_json_state(), indent=4)),
        executor=executor,
        priority_executor=priority_executor,
    )

    # GUI turns on live update
    _ = workflow.pixel_classifier_applet.set_live_update(dummy_prompt,
                                                         live_update=True)

    # GUI creates some feature extractors

    all_feature_extractors: List[IlpFilter] = []
    for scale in [0.3, 0.7, 1.0, 1.6, 3.5, 5.0, 10.0][0:3]:
        all_feature_extractors.append(
            IlpGaussianSmoothing(ilp_scale=scale, axis_2d="z"))
        all_feature_extractors.append(
            IlpLaplacianOfGaussian(ilp_scale=scale, axis_2d="z"))
        all_feature_extractors.append(
            IlpGaussianGradientMagnitude(ilp_scale=scale, axis_2d="z"))
        all_feature_extractors.append(
            IlpDifferenceOfGaussians(ilp_scale=scale, axis_2d="z"))
        all_feature_extractors.append(
            IlpStructureTensorEigenvalues(ilp_scale=scale, axis_2d="z"))
        all_feature_extractors.append(
            IlpHessianOfGaussianEigenvalues(ilp_scale=scale, axis_2d="z"))

    _ = workflow.feature_selection_applet.add_feature_extractors(
        user_prompt=dummy_prompt,
        feature_extractors=all_feature_extractors,
    )

    pixel_annotations = get_sample_c_cells_pixel_annotations()
    for label_name, label in zip(workflow.brushing_applet.label_names(),
                                 pixel_annotations):
        for a in label.annotations:
            result = workflow.brushing_applet.add_annotation(
                user_prompt=dummy_prompt,
                label_name=label_name,
                annotation=a,
            )
            assert result.is_ok()

    while workflow.pixel_classifier_applet.pixel_classifier() is None:
        time.sleep(0.2)

    classifier = workflow.pixel_classifier_applet.pixel_classifier()
    assert classifier is not None

    _ = workflow.save_project(fs=test_output_osfs,
                              path=PurePosixPath("blas.ilp"))

    url = Url.parse(test_output_osfs.geturl('blas.ilp'))
    assert url is not None

    loaded_workflow = PixelClassificationWorkflow.from_ilp(
        ilp_path=Path(url.path),
        on_async_change=lambda: print(
            json.dumps(workflow.export_applet._get_json_state(), indent=4)),
        executor=executor,
        priority_executor=priority_executor,
        allowed_protocols=[Protocol.FILE],
    )
    print("what")
    print(loaded_workflow)
    assert isinstance(loaded_workflow, PixelClassificationWorkflow)
    print(
        f"Loaded workflow; its pixel classifier applet description is {loaded_workflow.pixel_classifier_applet._state.description}"
    )

    # # calculate predictions on an entire data source
    raw_data_source = get_sample_c_cells_datasource()
    # preds_future = executor.submit(classifier.compute, raw_data_source.roi)
    # local_predictions = preds_future.result()
    # local_predictions.as_uint8().show_channels()

    # # calculate predictions on just a piece of arbitrary data
    # exported_tile = executor.submit(classifier.compute, DataRoi(datasource=raw_data_source, x=(100, 200), y=(100, 200)))
    # exported_tile.result().show_channels()

    ###################################

    #######################################

    # run an export job
    output_fs = get_test_output_osfs()
    predictions_export_datasink = create_precomputed_chunks_sink(
        shape=raw_data_source.shape.updated(c=classifier.num_classes),
        dtype=np.dtype("float32"),
        chunk_size=raw_data_source.tile_shape.updated(
            c=classifier.num_classes),
        fs=output_fs)

    print(f"Sending predictions job request??????")
    result = workflow.export_applet.start_export_job(
        datasource=raw_data_source, datasink=predictions_export_datasink)
    assert result is None

    print(f"---> Job successfully scheduled? Waiting for a while")
    wait_until_jobs_completed(workflow=workflow)
    print(f"Done waiting. Checking outputs")

    predictions_output = PrecomputedChunksDataSource(
        filesystem=output_fs,
        path=predictions_export_datasink.path,
        resolution=(1, 1, 1))
    for tile in predictions_output.roi.get_datasource_tiles():
        _ = tile.retrieve().cut(c=1).as_uint8(
            normalized=True)  #.show_channels()

##################################

    simple_segmentation_datasinks = [
        create_precomputed_chunks_sink(
            shape=raw_data_source.shape.updated(c=3),
            dtype=np.dtype("uint8"),
            chunk_size=raw_data_source.tile_shape.updated(c=3),
            fs=output_fs),
        create_precomputed_chunks_sink(
            shape=raw_data_source.shape.updated(c=3),
            dtype=np.dtype("uint8"),
            chunk_size=raw_data_source.tile_shape.updated(c=3),
            fs=output_fs),
    ]

    print(f"Sending simple segmentation job request??????")
    result = workflow.export_applet.start_simple_segmentation_export_job(
        datasource=raw_data_source,
        datasinks=simple_segmentation_datasinks,
    )

    print(f"---> Job successfully scheduled? Waiting for a while")
    wait_until_jobs_completed(workflow=workflow)
    print(f"Done waiting. Checking outputs")

    segmentation_output_1 = PrecomputedChunksDataSource(
        filesystem=output_fs,
        path=simple_segmentation_datasinks[1].path,
        resolution=(1, 1, 1))
    for tile in segmentation_output_1.roi.get_datasource_tiles():
        _ = tile.retrieve()  #.show_images()

####################################

    priority_executor.shutdown()
Example #20
def parse_url(url: str) -> Union[Url, UsageError]:
    parsed_url = Url.parse(url)
    if parsed_url is None:
        return UsageError(f"Bad url: {url}")
    return parsed_url
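A usage sketch of this error-as-value helper, with a hypothetical input:

url_result = parse_url("precomputed://https://example.com/volume")
if isinstance(url_result, UsageError):
    print(url_result)  # report and bail out instead of raising
else:
    print(url_result.raw)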
Example #21
    def from_json_value(cls, value: JsonValue) -> "OidcClient":
        value_obj = ensureJsonObject(value)
        raw_rootUrl = ensureJsonString(value_obj.get("rootUrl"))
        rootUrl = Url.parse(raw_rootUrl)
        if rootUrl is None:
            rootUrl = Url.parse(
                raw_rootUrl +
                "/")  # it's possible to register a rootUrl without a path -.-
        assert rootUrl is not None

        redirectUris: List[Url] = []
        for raw_redirect_uri in ensureJsonStringArray(
                value_obj.get("redirectUris")):
            redirect_uri = Url.parse(raw_redirect_uri)
            if redirect_uri is not None:
                redirectUris.append(redirect_uri)
            else:  # not an absolute URL (Url.parse returns None, it does not raise)
                uri = rootUrl.joinpath(
                    PurePosixPath(raw_redirect_uri)
                )  # FIXME: do leading slashes mean root here too?
                redirectUris.append(uri)

        baseUrl = Url.parse(ensureJsonString(value_obj.get("baseUrl")))
        assert baseUrl is not None
        return OidcClient(
            alwaysDisplayInConsole=ensureJsonBoolean(
                value_obj.get("alwaysDisplayInConsole")),
            baseUrl=baseUrl,
            bearerOnly=ensureJsonBoolean(value_obj.get("bearerOnly")),
            clientAuthenticatorType=ensureJsonString(
                value_obj.get("clientAuthenticatorType")),
            clientId=ensureJsonString(value_obj.get("clientId")),
            consentRequired=ensureJsonBoolean(
                value_obj.get("consentRequired")),
            defaultClientScopes=ensureJsonStringArray(
                value_obj.get("defaultClientScopes")),
            description=ensureJsonString(value_obj.get("description")),
            directAccessGrantsEnabled=ensureJsonBoolean(
                value_obj.get("directAccessGrantsEnabled")),
            enabled=ensureJsonBoolean(value_obj.get("enabled")),
            frontchannelLogout=ensureJsonBoolean(
                value_obj.get("frontchannelLogout")),
            fullScopeAllowed=ensureJsonBoolean(
                value_obj.get("fullScopeAllowed")),
            id=ensureJsonString(value_obj.get("id")),
            implicitFlowEnabled=ensureJsonBoolean(
                value_obj.get("implicitFlowEnabled")),
            name=ensureJsonString(value_obj.get("name")),
            nodeReRegistrationTimeout=ensureJsonInt(
                value_obj.get("nodeReRegistrationTimeout")),
            notBefore=ensureJsonInt(value_obj.get("notBefore")),
            optionalClientScopes=set(
                Scope.from_json_value(s) for s in ensureJsonArray(
                    value_obj.get("optionalClientScopes"))),
            protocol=ensureJsonString(value_obj.get("protocol")),
            publicClient=ensureJsonBoolean(value_obj.get("publicClient")),
            redirectUris=tuple(redirectUris),
            registrationAccessToken=ensureJsonString(
                value_obj.get("registrationAccessToken")),
            rootUrl=rootUrl,
            secret=ensureJsonString(value_obj.get("secret")),
            serviceAccountsEnabled=ensureJsonBoolean(
                value_obj.get("serviceAccountsEnabled")),
            standardFlowEnabled=ensureJsonBoolean(
                value_obj.get("standardFlowEnabled")),
            surrogateAuthRequired=ensureJsonBoolean(
                value_obj.get("surrogateAuthRequired")),
            webOrigins=ensureJsonStringArray(value_obj.get("webOrigins")),
        )
Example #22
async def main():
    ds = SkimageDataSource(
        filesystem=HttpFs(read_url=Url.parse("https://app.ilastik.org/")),
        path=Path("api/images/c_cells_1.png"))

    async with aiohttp.ClientSession() as session:

        print(f"Creating new session--------------")
        async with session.post(f"https://app.ilastik.org/api/session",
                                json={"session_duration": 30}) as response:
            response.raise_for_status()
            session_data: Dict[str, Any] = await response.json()
            session_id = session_data["id"]
        print(
            f"Done creating session: {json.dumps(session_data)} <<<<<<<<<<<<<<<<<<"
        )

        session_is_ready = False
        for _ in range(10):
            response = await session.get(
                f"https://app.ilastik.org/api/session/{session_id}")
            response.raise_for_status()
            session_status = await response.json()
            if session_status["status"] == "ready":
                session_url = session_status["url"]
                break
            print(f"Session {session_id} is notready yet")
            await asyncio.sleep(2)
        else:
            raise RuntimeError("Given up waiting on session")

        async with session.ws_connect(f"{session_url}/ws") as ws:
            asyncio.get_event_loop().create_task(read_server_status(ws))
            print("sending some feature extractors=======")
            await ws.send_json({
                "feature_selection_applet": [
                    {
                        "__class__": "GaussianSmoothing",
                        "sigma": 0.3,
                        "axis_2d": "z"
                    },
                    {
                        "__class__": "HessianOfGaussianEigenvalues",
                        "scale": 0.7,
                        "axis_2d": "z"
                    },
                ]
            })
            print("done sending feature extractors<<<<<")

            print("sending some annotations=======")
            brush_strokes = [
                Annotation.interpolate_from_points(voxels=[
                    Point5D.zero(x=140, y=150),
                    Point5D.zero(x=145, y=155)
                ],
                                                   color=Color(r=np.uint8(0),
                                                               g=np.uint8(255),
                                                               b=np.uint8(0)),
                                                   raw_data=ds),
                Annotation.interpolate_from_points(voxels=[
                    Point5D.zero(x=238, y=101),
                    Point5D.zero(x=229, y=139)
                ],
                                                   color=Color(r=np.uint8(0),
                                                               g=np.uint8(255),
                                                               b=np.uint8(0)),
                                                   raw_data=ds),
                Annotation.interpolate_from_points(voxels=[
                    Point5D.zero(x=283, y=87),
                    Point5D.zero(x=288, y=92)
                ],
                                                   color=Color(r=np.uint8(255),
                                                               g=np.uint8(0),
                                                               b=np.uint8(0)),
                                                   raw_data=ds),
                Annotation.interpolate_from_points(voxels=[
                    Point5D.zero(x=274, y=168),
                    Point5D.zero(x=256, y=191)
                ],
                                                   color=Color(r=np.uint8(255),
                                                               g=np.uint8(0),
                                                               b=np.uint8(0)),
                                                   raw_data=ds),
            ]
            await ws.send_json(
                {"brushing_applet": [a.to_json_data() for a in brush_strokes]})
            print("done sending annotations<<<<<")

        from base64 import b64encode
        encoded_ds: str = b64encode(json.dumps(
            ds.to_json_value()).encode("utf8"),
                                    altchars=b'-_').decode("utf8")

        response_tasks = {}
        for tile in ds.roi.get_tiles(tile_shape=Shape5D(x=256, y=256, c=2),
                                     tiles_origin=Point5D.zero()):
            url = f"{session_url}/predictions/raw_data={encoded_ds}/run_id=123456/data/{tile.x[0]}-{tile.x[1]}_{tile.y[0]}-{tile.y[1]}_0-1"
            print(f"---> Requesting {url}")
            response_tasks[tile] = session.get(url)

        for tile, resp in response_tasks.items():
            async with resp as response:
                print("Status:", response.status)
                print("Content-type:", response.headers['content-type'])

                tile_bytes = await response.content.read()
                print(f"Got predictions<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")

                raw_data = np.frombuffer(tile_bytes, dtype=np.uint8).reshape(
                    2, tile.shape.y, tile.shape.x)
                a = Array5D(raw_data, axiskeys="cyx")
                # a.show_channels()

        global finished
        finished = True
Example #23
 def url(self) -> Url:
     url = Url.parse(self.filesystem.geturl(self.outer_path.as_posix()))
     assert url is not None
     return url.updated_with(
         hash_=f"inner_path={self.inner_path.as_posix()}")