def from_cookie(cls, request: web.Request) -> Optional["EbrainsSession"]:
    access_token = request.cookies.get(cls.AUTH_COOKIE_KEY)
    if access_token is None:
        return None
    user_token = UserToken(access_token=access_token)
    if not user_token.is_valid():
        return None
    return EbrainsSession(user_token=user_token)
def from_json_value(cls, value: JsonValue) -> "BucketFs":
    value_obj = ensureJsonObject(value)
    raw_token = value_obj.get("ebrains_user_token")
    if raw_token is not None:
        token = UserToken.from_json_value(raw_token)
    else:
        token_result = UserToken.get_global_login_token()
        if isinstance(token_result, UsageError):
            raise token_result
        token = token_result
    return BucketFs(
        bucket_name=ensureJsonString(value_obj.get("bucket_name")),
        prefix=PurePosixPath(ensureJsonString(value_obj.get("prefix"))),
        ebrains_user_token=token,
    )
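# For reference, a sketch of the JSON shape that from_json_value() above
# accepts; the values are illustrative. "ebrains_user_token" is optional and
# falls back to the globally logged-in token when absent.
EXAMPLE_BUCKET_FS_JSON = {
    "bucket_name": "hbp-image-service",
    "prefix": "/my-project/outputs",
    # "ebrains_user_token": {...},  # optional serialized UserToken
}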
def __setstate__(self, value_obj: Dict[str, Any]):
    raw_token = value_obj.get("ebrains_user_token")
    if raw_token is not None:
        token = UserToken.from_json_value(raw_token)
    else:
        token_result = UserToken.get_global_login_token()
        if isinstance(token_result, UsageError):
            raise token_result
        token = token_result
    self.__init__(
        bucket_name=ensureJsonString(value_obj.get("bucket_name")),
        prefix=PurePosixPath(ensureJsonString(value_obj.get("prefix"))),
        ebrains_user_token=token,
    )
async def forward_chunk_request(self, request: web.Request) -> web.Response:
    """Redirects a precomp chunk request to the original URL"""
    encoded_original_url = request.match_info.get("encoded_original_url")
    if not encoded_original_url:
        return web.Response(status=400, text="Missing parameter: url")
    decoded_url = b64decode(encoded_original_url, altchars=b'-_').decode('utf8')
    url = Url.parse(decoded_url)
    if url is None:
        return web.Response(status=400, text=f"Bad url: {decoded_url}")
    rest = request.match_info.get("rest", "").lstrip("/")
    tile_url = url.joinpath(rest)
    if tile_url.hostname != "data-proxy.ebrains.eu":
        raise web.HTTPFound(location=tile_url.schemeless_raw)
    token = UserToken.get_global_login_token()
    if isinstance(token, UsageError):
        return web.Response(status=403, text="Token has expired")  # FIXME
    async with self.http_client_session.get(
        tile_url.schemeless_raw,
        ssl=self.ssl_context,
        headers=token.as_auth_header(),
    ) as response:
        cscs_url = (await response.json())["url"]
        raise web.HTTPFound(location=cscs_url)
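# A minimal sketch of how a client would build the "encoded_original_url" path
# segment that forward_chunk_request() decodes above: the original URL is
# base64-encoded with the URL-safe alphabet, matching the
# b64decode(..., altchars=b'-_') call. The route path shown is hypothetical.
from base64 import b64encode

original_url = "https://data-proxy.ebrains.eu/api/buckets/my-bucket/my-image.precomputed"
encoded = b64encode(original_url.encode("utf8"), altchars=b'-_').decode("utf8")
chunk_path = f"/forward_chunk_request/{encoded}/data/0-256_0-256_0-1"  # hypothetical route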
def get_user_token(self, *, code: str, redirect_uri: Url) -> "UserToken":
    if not self.can_redirect_to(redirect_uri):
        raise ValueError(f"Can't redirect to {redirect_uri.raw}")
    data = {
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": redirect_uri.raw,
        "client_id": self.clientId,
        "client_secret": self.secret,
    }
    resp = requests.post(
        "https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token",
        allow_redirects=False,
        data=data,
    )
    resp.raise_for_status()
    data = ensureJsonObject(resp.json())
    return UserToken(
        access_token=ensureJsonString(data.get("access_token")),
        refresh_token=ensureJsonString(data.get("refresh_token")),
        # expires_in=data["expires_in"],
        # refresh_expires_in=data["refresh_expires_in"],
        # token_type=data["token_type"],
        # id_token=data["id_token"],
        # not_before_policy=data["not-before-policy"],
        # session_state=data["session_state"],
        # scope=data["scope"],
    )
def get_test_output_bucket_fs() -> BucketFs:
    now = datetime.now()
    now_str = f"{now.year:02}y{now.month:02}m{now.day:02}d__{now.hour:02}h{now.minute:02}m{now.second:02}s"
    return BucketFs(
        bucket_name="hbp-image-service",
        prefix=PurePosixPath(f"/test-{now_str}"),
        ebrains_user_token=UserToken.get_global_token_or_raise(),
    )
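# Note: the prefix above renders like "/test-2024y05m17d__13h45m09s", so every
# test run writes to its own timestamped location inside the bucket.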
async def from_cookie(
    cls, request: web.Request, http_client_session: ClientSession
) -> Optional["EbrainsLogin"]:
    access_token = request.cookies.get(cls.AUTH_COOKIE_KEY)
    if access_token is None:
        return None
    user_token = UserToken(access_token=access_token)
    if not await user_token.is_valid(http_client_session):
        return None
    return EbrainsLogin(user_token=user_token)
async def stripped_precomputed_info(self, request: web.Request) -> web.Response:
    """Serves a precomp info stripped of all but one scale"""
    resolution_x = request.match_info.get("resolution_x")
    resolution_y = request.match_info.get("resolution_y")
    resolution_z = request.match_info.get("resolution_z")
    if resolution_x is None or resolution_y is None or resolution_z is None:
        return web.Response(
            status=400,
            text=f"Bad resolution: {resolution_x}_{resolution_y}_{resolution_z}",
        )
    try:
        resolution = (int(resolution_x), int(resolution_y), int(resolution_z))
    except ValueError:
        return web.Response(
            status=400,
            text=f"Bad resolution: {resolution_x}_{resolution_y}_{resolution_z}",
        )
    encoded_original_url = request.match_info.get("encoded_original_url")
    if not encoded_original_url:
        return web.Response(status=400, text="Missing parameter: url")
    decoded_url = b64decode(encoded_original_url, altchars=b'-_').decode('utf8')
    base_url = Url.parse(decoded_url)
    if base_url is None:
        return web.Response(status=400, text=f"Bad url: {decoded_url}")
    info_url = base_url.joinpath("info")
    logger.debug(f"Will request this info: {info_url.schemeless_raw}")
    token = UserToken.get_global_login_token()
    if isinstance(token, UsageError):
        return web.Response(status=403, text="Token has expired")  # FIXME
    is_data_proxy = info_url.hostname == "data-proxy.ebrains.eu"
    async with self.http_client_session.get(
        info_url.schemeless_raw,
        ssl=self.ssl_context,
        headers=token.as_auth_header() if is_data_proxy else {},
        params={"redirect": "true"} if is_data_proxy else {},
    ) as response:
        response_text = await response.text()
        if response.status // 100 != 2:
            return web.Response(status=response.status, text=response_text)
        info = PrecomputedChunksInfo.from_json_value(json.loads(response_text))
        stripped_info = info.stripped(resolution=resolution)
        return web.json_response(stripped_info.to_json_value())
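# For reference, a minimal sketch of a precomputed "info" of the kind being
# stripped above (values illustrative). stripped() is assumed to keep only the
# scale whose "resolution" matches the x/y/z values parsed from the route.
EXAMPLE_PRECOMPUTED_INFO = {
    "type": "image",
    "data_type": "uint8",
    "num_channels": 1,
    "scales": [
        {"key": "1_1_1", "resolution": [1, 1, 1], "size": [1000, 1000, 10],
         "chunk_sizes": [[64, 64, 1]], "encoding": "raw"},
        {"key": "2_2_1", "resolution": [2, 2, 1], "size": [500, 500, 10],
         "chunk_sizes": [[64, 64, 1]], "encoding": "raw"},
    ],
}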
def try_create(
    cls,
    bucket_name: str,
    prefix: PurePosixPath,
    ebrains_user_token: "UserToken | None" = None,
) -> "BucketFs | UsageError":
    token_result = ebrains_user_token or UserToken.get_global_login_token()
    if isinstance(token_result, UsageError):
        return token_result
    return BucketFs(bucket_name=bucket_name, prefix=prefix, ebrains_user_token=token_result)
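# A usage sketch for the error-as-value style above: try_create() returns a
# UsageError instead of raising, so callers branch on the result. The bucket
# name and prefix are illustrative.
fs_result = BucketFs.try_create(
    bucket_name="hbp-image-service",
    prefix=PurePosixPath("/my-project"),
)
if isinstance(fs_result, UsageError):
    raise fs_result  # or surface it to the user without a traceback
fs: "BucketFs" = fs_result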
def __init__(self, bucket_name: str, prefix: PurePosixPath, ebrains_user_token: UserToken):
    self.bucket_name = bucket_name
    self.prefix = prefix
    self.ebrains_user_token = ebrains_user_token
    self.bucket_url = self.API_URL.concatpath(bucket_name)
    self.url = self.bucket_url.concatpath(prefix)
    super().__init__()
    self.session = requests.Session()
    self.session.headers.update(ebrains_user_token.as_auth_header())
    self.cscs_session = requests.Session()
    self.pid = os.getpid()
def try_from_url(
    cls, url: Url, ebrains_user_token: "UserToken | None" = None
) -> "BucketFs | Exception":
    if not url.raw.startswith(cls.API_URL.raw):
        return Exception(f"Url must be inside the data-proxy ({cls.API_URL.raw}). Got {url}")
    bucket_name_part_index = len(cls.API_URL.path.parts)
    if len(url.path.parts) <= bucket_name_part_index:
        return Exception(f"Bad bucket url: {url}")
    token_result = ebrains_user_token or UserToken.get_global_login_token()
    if isinstance(token_result, UsageError):
        return token_result
    return BucketFs(
        bucket_name=url.path.parts[bucket_name_part_index],
        prefix=PurePosixPath("/".join(url.path.parts[bucket_name_part_index + 1:])),
        ebrains_user_token=token_result,
    )
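# A usage sketch, assuming cls.API_URL points at the EBRAINS data-proxy
# buckets endpoint (e.g. https://data-proxy.ebrains.eu/api/buckets); the
# concrete URL below is illustrative.
url = Url.parse("https://data-proxy.ebrains.eu/api/buckets/hbp-image-service/some/prefix")
assert url is not None
fs_or_error = BucketFs.try_from_url(url)
if isinstance(fs_or_error, Exception):
    raise fs_or_error
# fs_or_error.bucket_name == "hbp-image-service"
# fs_or_error.prefix == PurePosixPath("some/prefix")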
async def get_user_token(
    self, *, code: str, redirect_uri: Url, http_client_session: ClientSession
) -> "UserToken":
    if not self.can_redirect_to(redirect_uri):
        raise ValueError(f"Can't redirect to {redirect_uri.raw}")
    data = {
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": redirect_uri.raw,
        "client_id": self.clientId,
        "client_secret": self.secret,
    }
    resp = await http_client_session.request(
        method="POST",
        url="https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token",
        allow_redirects=False,
        data=data,
    )
    resp.raise_for_status()
    data = ensureJsonObject(await resp.json())
    return UserToken.from_json_value(data)
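# For reference, a sketch of the token endpoint response that
# UserToken.from_json_value() consumes; the field names match the ones
# referenced in the synchronous variant above, and the values are illustrative.
EXAMPLE_TOKEN_RESPONSE = {
    "access_token": "eyJhbGciOi...",
    "refresh_token": "eyJhbGciOi...",
    "expires_in": 604800,
    "refresh_expires_in": 1209600,
    "token_type": "Bearer",
    "session_state": "a1b2c3...",
    "scope": "openid profile email",
}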
help="Number of minutes this workflow can run for") parser.add_argument("--ebrains-user-access-token", type=str, required=True) parser.add_argument("--listen-socket", type=Path, required=True) subparsers = parser.add_subparsers(required=False, help="tunnel stuff") tunnel_parser = subparsers.add_parser( "tunnel", help="Creates a reverse tunnel to an orchestrator") tunnel_parser.add_argument("--remote-username", type=str, required=True) tunnel_parser.add_argument("--remote-host", required=True) tunnel_parser.add_argument("--remote-unix-socket", type=Path, required=True) args = parser.parse_args() UserToken.login_globally(token=UserToken( access_token=args.ebrains_user_access_token)) executor = get_executor(hint="server_tile_handler", max_workers=multiprocessing.cpu_count()) if "remote_username" in vars(args): server_context = ReverseSshTunnel( remote_username=args.remote_username, remote_host=args.remote_host, remote_unix_socket=args.remote_unix_socket, local_unix_socket=args.listen_socket, ) else: server_context = contextlib.nullcontext() with server_context:
async def main():
    ilastik_root_url = Url.parse("https://app.ilastik.org/")
    assert ilastik_root_url is not None
    data_url = Url.parse("precomputed://https://app.ilastik.org/public/images/c_cells_2.precomputed")
    assert data_url is not None
    datasources = try_get_datasources_from_url(url=data_url)
    if isinstance(datasources, Exception):
        raise datasources
    ds = datasources[0]

    token = UserToken.from_environment()
    assert isinstance(token, UserToken)

    async with aiohttp.ClientSession(
        cookies={EbrainsLogin.AUTH_COOKIE_KEY: token.access_token}
    ) as session:
        print("Creating new session --------------")
        async with session.post(
            ilastik_root_url.concatpath("api/session").raw,
            json={"session_duration_minutes": 15},
        ) as response:
            response.raise_for_status()
            session_data: Dict[str, Any] = await response.json()
            session_id = session_data["id"]
        print(f"Done creating session: {json.dumps(session_data)} <<<<<<<<<<<<<<<<<<")

        for _ in range(10):
            response = await session.get(ilastik_root_url.concatpath(f"api/session/{session_id}").raw)
            response.raise_for_status()
            session_status = await response.json()
            if session_status["status"] == "ready":
                session_url = session_status["url"]
                break
            print(f"Session {session_id} is not ready yet")
            _ = await asyncio.sleep(2)
        else:
            raise RuntimeError("Gave up waiting on session")

        async with session.ws_connect(f"{session_url}/ws") as ws:
            _ = asyncio.get_event_loop().create_task(read_server_status(ws))

            print("Sending some feature extractors =======")
            await ws.send_json(RPCPayload(
                applet_name="feature_selection_applet",
                method_name="add_feature_extractors",
                arguments={
                    "feature_extractors": tuple(fe.to_json_value() for fe in get_sample_feature_extractors())
                },
            ).to_json_value())
            print("Done sending feature extractors <<<<<")

            print("Sending some annotations =======")
            default_label_names = ["Foreground", "Background"]
            for label_name, label in zip(
                default_label_names,
                get_sample_c_cells_pixel_annotations(override_datasource=ds),
            ):
                for a in label.annotations:
                    await ws.send_json(RPCPayload(
                        applet_name="brushing_applet",
                        method_name="add_annotation",
                        arguments={
                            "label_name": label_name,
                            "annotation": a.to_json_data(),
                        },
                    ).to_json_value())
            print("Done sending annotations <<<<<")
            await asyncio.sleep(2)

            print("Enabling live update =======")
            await ws.send_json(RPCPayload(
                applet_name="pixel_classification_applet",
                method_name="set_live_update",
                arguments={"live_update": True},
            ).to_json_value())
            await asyncio.sleep(2)

            # from base64 import b64encode
            # encoded_ds: str = b64encode(json.dumps(ds.to_json_value()).encode("utf8"), altchars=b'-_').decode("utf8")
            # response_tasks = {}
            # for tile in ds.roi.get_tiles(tile_shape=Shape5D(x=256, y=256, c=2), tiles_origin=Point5D.zero()):
            #     url = f"{session_url}/predictions/raw_data={encoded_ds}/generation={classifier_generation}/data/{tile.x[0]}-{tile.x[1]}_{tile.y[0]}-{tile.y[1]}_0-1"
            #     print(f"---> Requesting {url}")
            #     response_tasks[tile] = session.get(url)
            # for tile, resp in response_tasks.items():
            #     async with resp as response:
            #         print("Status:", response.status)
            #         print("Content-type:", response.headers['content-type'])
            #         if response.status // 100 != 2:
            #             raise Exception(f"Error: {(await response.content.read()).decode('utf8')}")
            #         tile_bytes = await response.content.read()
            #         print("Got predictions <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
            #         raw_data = np.frombuffer(tile_bytes, dtype=np.uint8).reshape(2, tile.shape.y, tile.shape.x)
            #         Array5D(raw_data, axiskeys="cyx").show_channels()

            hbp_image_service_bucket_fs = BucketFs(
                bucket_name="hbp-image-service",
                ebrains_user_token=UserToken.get_global_token_or_raise(),
                prefix=PurePosixPath("/"),
            )

            predictions_export_datasink = create_precomputed_chunks_sink(
                shape=ds.shape.updated(c=2),
                dtype=np.dtype("float32"),
                chunk_size=ds.tile_shape.updated(c=2),
                fs=hbp_image_service_bucket_fs,
            )

            print("Sending predictions job request")
            await ws.send_json(RPCPayload(
                applet_name="export_applet",
                method_name="start_export_job",
                arguments={
                    "datasource": ds.to_json_value(),
                    "datasink": predictions_export_datasink.to_json_value(),
                },
            ).to_json_value())

            simple_segmentation_datasinks = [
                create_precomputed_chunks_sink(
                    shape=ds.shape.updated(c=3),
                    dtype=np.dtype("uint8"),
                    chunk_size=ds.tile_shape.updated(c=3),
                    fs=hbp_image_service_bucket_fs),
                create_precomputed_chunks_sink(
                    shape=ds.shape.updated(c=3),
                    dtype=np.dtype("uint8"),
                    chunk_size=ds.tile_shape.updated(c=3),
                    fs=hbp_image_service_bucket_fs),
            ]

            print("Sending simple segmentation job request")
            await ws.send_json(RPCPayload(
                applet_name="export_applet",
                method_name="start_simple_segmentation_export_job",
                arguments={
                    "datasource": ds.to_json_value(),
                    "datasinks": tuple(ds.to_json_value() for ds in simple_segmentation_datasinks),
                },
            ).to_json_value())

            print("---> Jobs scheduled. Waiting for a while")
            await asyncio.sleep(15)

            print("Done waiting. Checking outputs")
            predictions_output = PrecomputedChunksDataSource(
                filesystem=hbp_image_service_bucket_fs,
                path=predictions_export_datasink.path,
                resolution=(1, 1, 1),
            )
            for tile in predictions_output.roi.get_datasource_tiles():
                tile.retrieve().as_uint8(normalized=True)  # .show_channels()

            segmentation_output_1 = PrecomputedChunksDataSource(
                filesystem=hbp_image_service_bucket_fs,
                path=simple_segmentation_datasinks[1].path,
                resolution=(1, 1, 1),
            )
            for tile in segmentation_output_1.roi.get_datasource_tiles():
                tile.retrieve()  # .show_images()

        close_url = f"{session_url}/close"
        print(f"Closing session by sending DELETE to {close_url}")
        r = await session.delete(close_url)
        r.raise_for_status()

    global finished
    finished = True