def get_user_token(self, *, code: str, redirect_uri: Url) -> "UserToken":
    """Exchange an OAuth2 authorization code for a UserToken.

    Args:
        code: the authorization code received on the redirect.
        redirect_uri: must be an allowed redirect target for this client.

    Raises:
        ValueError: if redirect_uri is not an allowed redirect target.
        requests.HTTPError: if the token endpoint returns an error status.
    """
    if not self.can_redirect_to(redirect_uri):
        raise ValueError(f"Can't redirect to {redirect_uri.raw}")
    data = {
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": redirect_uri.raw,
        "client_id": self.clientId,
        "client_secret": self.secret,
    }
    # NOTE: do not log `data` or the raw response body; they contain the
    # client secret and the issued tokens (the previous debug prints did).
    resp = requests.post(
        "https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token",
        allow_redirects=False,
        data=data,
    )
    resp.raise_for_status()
    payload = ensureJsonObject(resp.json())
    return UserToken(
        access_token=ensureJsonString(payload.get("access_token")),
        refresh_token=ensureJsonString(payload.get("refresh_token")),
    )
def from_json_value(cls, value: JsonValue) -> "HttpFs":
    """Deserialize an HttpFs from its JSON representation.

    Raises:
        ValueError: if 'read_url' (or a present 'write_url') does not parse.
    """
    obj = ensureJsonObject(value)

    raw_headers = obj.get("headers")
    headers = (
        {}
        if raw_headers is None
        else {
            ensureJsonString(k): ensureJsonString(v)
            for k, v in ensureJsonObject(raw_headers).items()
        }
    )

    read_url = Url.parse(ensureJsonString(obj.get("read_url")))
    if read_url is None:
        raise ValueError(
            f"Bad 'read_url' in json payload: {json.dumps(value, indent=4)}"
        )

    write_url = None
    raw_write_url = obj.get("write_url")
    if raw_write_url is not None:
        write_url = Url.parse(ensureJsonString(raw_write_url))
        if write_url is None:
            raise ValueError(
                f"Bad write_url in HttpFs payload: {json.dumps(value, indent=4)}"
            )

    return cls(read_url=read_url, write_url=write_url, headers=headers)
def from_json_value(cls, value: JsonValue) -> "UserInfo":
    """Build a UserInfo from a decoded OIDC userinfo JSON object."""
    obj = ensureJsonObject(value)
    raw_sub = ensureJsonString(obj["sub"])
    username = ensureJsonString(obj["preferred_username"])
    return UserInfo(sub=uuid.UUID(raw_sub), preferred_username=username)
def from_json_value(cls, value: JsonValue) -> "RPCPayload":
    """Deserialize an RPCPayload (applet name, method name, arguments)."""
    obj = ensureJsonObject(value)
    applet_name = ensureJsonString(obj.get("applet_name"))
    method_name = ensureJsonString(obj.get("method_name"))
    arguments = ensureJsonObject(obj.get("arguments"))
    return RPCPayload(
        applet_name=applet_name,
        method_name=method_name,
        arguments=arguments,
    )
def get(cls, username: str, password: str) -> "DeveloperToken":
    """Fetch a DeveloperToken via the OAuth2 resource-owner password grant.

    Raises:
        requests.HTTPError: if the token endpoint returns an error status.
    """
    response = requests.post(
        "https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token",
        auth=('developer', ''),
        data={
            "username": username,
            "password": password,
            "grant_type": "password",
        },
    )
    # Check the HTTP status before parsing the body: an error response may
    # not be JSON, and the old debug print both parsed it too early and
    # leaked the issued tokens to stdout.
    response.raise_for_status()
    payload = ensureJsonObject(response.json())
    return DeveloperToken(
        access_token=ensureJsonString(payload.get("access_token")),
        expires_in=ensureJsonInt(payload.get("expires_in")),
        not_before_policy=ensureJsonInt(payload.get("not-before-policy")),
        refresh_expires_in=ensureJsonInt(payload.get("refresh_expires_in")),
        refresh_token=ensureJsonString(payload.get("refresh_token")),
        scope=ensureJsonString(payload.get("scope")),
        token_type=ensureJsonString(payload.get("token_type")),
    )
def from_json_value(cls, value: JsonValue) -> "BucketObject":
    """Deserialize a BucketObject from a bucket-listing JSON entry."""
    obj = ensureJsonObject(value)
    modified = datetime.fromisoformat(ensureJsonString(obj.get("last_modified")))
    return BucketObject(
        hash_=ensureJsonString(obj.get("hash")),
        last_modified=modified,
        bytes_=ensureJsonInt(obj.get("bytes")),
        name=PurePosixPath(ensureJsonString(obj.get("name"))),
        content_type=ensureJsonString(obj.get("content_type")),
    )
def from_json_value(cls, data: JsonValue):
    """Deserialize a PrecomputedChunksInfo from an 'info' JSON object."""
    obj = ensureJsonObject(data)
    scales = tuple(
        PrecomputedChunksScale.from_json_value(raw_scale)
        for raw_scale in ensureJsonArray(obj.get("scales"))
    )
    return PrecomputedChunksInfo(
        type_=ensureJsonString(obj.get("type")),
        data_type=np.dtype(ensureJsonString(obj.get("data_type"))),  # type: ignore
        num_channels=ensureJsonInt(obj.get("num_channels")),
        scales=scales,
    )
def from_json_value(cls, value: JsonValue) -> "PrecomputedChunksScaleSink":
    """Deserialize a PrecomputedChunksScaleSink from JSON."""
    obj = ensureJsonObject(value)
    filesystem = JsonableFilesystem.from_json_value(obj.get("filesystem"))
    info_dir = PurePosixPath(ensureJsonString(obj.get("info_dir")))
    scale = PrecomputedChunksScale.from_json_value(obj.get("scale"))
    return PrecomputedChunksScaleSink(
        filesystem=filesystem,
        info_dir=info_dir,
        scale=scale,
        dtype=np.dtype(ensureJsonString(obj.get("dtype"))),
        num_channels=ensureJsonInt(obj.get("num_channels")),
    )
def from_json_value(cls, value: JsonValue) -> "PrecomputedChunksScaleSink":
    """Deserialize a PrecomputedChunksScaleSink from JSON."""
    obj = ensureJsonObject(value)
    filesystem = JsonableFilesystem.from_json_value(obj.get("filesystem"))
    base_path = Path(ensureJsonString(obj.get("base_path")))
    scale = PrecomputedChunksScale5D.from_json_value(obj.get("scale"))
    return PrecomputedChunksScaleSink(
        filesystem=filesystem,
        base_path=base_path,
        scale=scale,
        dtype=np.dtype(ensureJsonString(obj.get("dtype"))),  # type: ignore
    )
def from_json_value(cls, value: JsonValue) -> "H5DataSource":
    """Deserialize an H5DataSource; a missing 'location' becomes the origin."""
    obj = ensureJsonObject(value)
    raw_location = obj.get("location")
    location = (
        Point5D.zero()
        if raw_location is None
        else Point5D.from_json_value(raw_location)
    )
    return H5DataSource(
        outer_path=Path(ensureJsonString(obj.get("outer_path"))),
        inner_path=PurePosixPath(ensureJsonString(obj.get("inner_path"))),
        filesystem=JsonableFilesystem.from_json_value(obj.get("filesystem")),
        location=location,
    )
def from_json_value(cls, data: JsonValue):
    """Deserialize a PrecomputedChunksInfo, validating the 'type' marker.

    Raises:
        ValueError: if the payload's 'type' is anything but "image".
    """
    obj = ensureJsonObject(data)
    type_ = ensureJsonString(obj.get("type"))
    if type_ != "image":
        raise ValueError(f"Bad 'type' marker value: {type_}")
    return PrecomputedChunksInfo(
        type_=type_,
        data_type=np.dtype(ensureJsonString(obj.get("data_type"))),
        num_channels=ensureJsonInt(obj.get("num_channels")),
        scales=tuple(
            PrecomputedChunksScale.from_json_value(raw_scale)
            for raw_scale in ensureJsonArray(obj.get("scales"))
        ),
    )
def from_json_value(cls, value: JsonValue) -> "H5DataSource":
    """Deserialize an H5DataSource; a missing 'location' becomes the origin."""
    obj = ensureJsonObject(value)
    location = (
        ensureOptional(Point5D.from_json_value, obj.get("location"))
        or Point5D.zero()
    )
    return H5DataSource(
        outer_path=PurePosixPath(ensureJsonString(obj.get("outer_path"))),
        inner_path=PurePosixPath(ensureJsonString(obj.get("inner_path"))),
        filesystem=JsonableFilesystem.from_json_value(obj.get("filesystem")),
        location=location,
        spatial_resolution=ensureJsonIntTripplet(obj.get("spatial_resolution")),
    )
def from_json_value(cls, value: JsonValue) -> "BucketFs":
    """Deserialize a BucketFs.

    Falls back to the global login token when the payload carries no
    'ebrains_user_token'; raises the UsageError if that fallback fails.
    """
    obj = ensureJsonObject(value)
    raw_token = obj.get("ebrains_user_token")
    if raw_token is None:
        token_result = UserToken.get_global_login_token()
        if isinstance(token_result, UsageError):
            raise token_result
        token = token_result
    else:
        token = UserToken.from_json_value(raw_token)
    return BucketFs(
        bucket_name=ensureJsonString(obj.get("bucket_name")),
        prefix=PurePosixPath(ensureJsonString(obj.get("prefix"))),
        ebrains_user_token=token,
    )
def __setstate__(self, value_obj: Dict[str, Any]):
    """Restore a pickled BucketFs by re-running __init__ on the saved state.

    Falls back to the global login token when the pickled state carries no
    'ebrains_user_token'; raises the UsageError if that fallback fails.
    """
    raw_token = value_obj.get("ebrains_user_token")
    if raw_token is None:
        token_result = UserToken.get_global_login_token()
        if isinstance(token_result, UsageError):
            raise token_result
        token = token_result
    else:
        token = UserToken.from_json_value(raw_token)
    self.__init__(
        bucket_name=ensureJsonString(value_obj.get("bucket_name")),
        prefix=PurePosixPath(ensureJsonString(value_obj.get("prefix"))),
        ebrains_user_token=token,
    )
def from_json_value(cls, value: JsonValue) -> "DataSource":
    """Dispatch deserialization to the constructor registered under the
    payload's '__class__' marker.

    Raises:
        ValueError: if no constructor is registered under that name.
    """
    json_obj = ensureJsonObject(value)
    datasource_name = ensureJsonString(json_obj.get("__class__"))
    # Direct dict lookup instead of scanning .items() for a matching key.
    constructor = cls.datasource_from_json_constructors.get(datasource_name)
    if constructor is not None:
        return constructor(value)
    raise ValueError(f"Can't deserialize {json.dumps(value)}")
def __setstate__(self, data: JsonValue):
    """Restore pickled state by re-running __init__ on the saved fields."""
    obj = ensureJsonObject(data)
    path = Path(ensureJsonString(obj.get("path")))
    location = Interval5D.from_json_value(obj.get("interval")).start
    filesystem = JsonableFilesystem.from_json_value(obj.get("filesystem"))
    self.__init__(path=path, location=location, filesystem=filesystem)
def from_json_value(cls, data: JsonValue) -> "PrecomputedChunksEncoder":
    """Map an encoding label ('raw', 'jpeg'/'jpg') to its encoder instance.

    Raises:
        ValueError: for any other label.
    """
    label = ensureJsonString(data)
    if label == "raw":
        return RawEncoder()
    if label in ("jpeg", "jpg"):
        return JpegEncoder()
    raise ValueError(f"Bad encoding value: {label}")
async def load_project(self, request: web.Request) -> web.Response:
    """HTTP handler: load a pixel-classification project (.ilp) from a bucket.

    Expects a JSON body with 'fs' (a BucketFs payload) and
    'project_file_path'. On success replaces self.workflow and notifies
    connected clients.
    """
    payload = await request.json()
    filesystem = BucketFs.from_json_value(payload.get("fs"))
    file_path = PurePosixPath(
        ensureJsonString(payload.get("project_file_path")))
    # Reject empty paths and path-traversal attempts before any I/O.
    if len(file_path.parts) == 0 or ".." in file_path.parts:
        return web.Response(status=400,
                            text=f"Bad project file path: {file_path}")
    # Blocking download runs on the executor so the event loop stays free.
    ilp_bytes = await asyncio.wrap_future(
        self.executor.submit(
            do_load_project_bytes,
            filesystem=filesystem,
            file_path=file_path,
        ))
    new_workflow_result = PixelClassificationWorkflow.from_ilp_bytes(
        ilp_bytes=ilp_bytes,
        on_async_change=lambda: self.enqueue_user_interaction(lambda: None
                                                              ),  #FIXME?
        executor=self.executor,
        priority_executor=self.priority_executor,
        allowed_protocols=(Protocol.HTTP, Protocol.HTTPS),
    )
    if isinstance(new_workflow_result, Exception):
        return web.Response(
            status=400,
            text=f"Could not load project: {new_workflow_result}")
    self.workflow = new_workflow_result
    self._update_clients()
    # Bug fix: this handler *loads* a project; the old message said "saved".
    return web.Response(
        status=200,
        text=f"Project loaded from {filesystem.geturl(file_path.as_posix())}")
def from_json_data(cls, data: JsonValue, location_override: Optional[Point5D] = None
                   ) -> "N5DatasetAttributes":
    """Parse an N5 'attributes.json' object into N5DatasetAttributes.

    The payload's dimensions, blockSize, axes and location arrays are all
    reversed ([::-1]) before use — presumably because N5 lists them in
    fastest-axis-first order while Shape5D/Point5D expect the opposite
    (TODO confirm against the N5 spec).

    Args:
        data: decoded attributes.json content.
        location_override: if given, takes precedence over the payload's
            'location' attribute.
    """
    raw_attributes = ensureJsonObject(data)
    dimensions = ensureJsonIntArray(raw_attributes.get("dimensions"))
    blockSize = ensureJsonIntArray(raw_attributes.get("blockSize"))
    axes = raw_attributes.get("axes")
    if axes is None:
        # No explicit axes in the payload: infer keys from the dimensions.
        axiskeys = guess_axiskeys(dimensions)
    else:
        axiskeys = "".join(ensureJsonStringArray(axes)[::-1]).lower()
    location = raw_attributes.get("location")
    if location is None:
        # Missing location means the dataset sits at the origin.
        location_5d = Point5D.zero()
    else:
        location_5d = Point5D.zero(
            **dict(zip(axiskeys, ensureJsonIntArray(location)[::-1])))
    return N5DatasetAttributes(
        blockSize=Shape5D.create(raw_shape=blockSize[::-1], axiskeys=axiskeys),
        dimensions=Shape5D.create(raw_shape=dimensions[::-1], axiskeys=axiskeys),
        # '>' forces big-endian byte order on the parsed dtype.
        dataType=np.dtype(ensureJsonString(raw_attributes.get(
            "dataType"))).newbyteorder(">"),  # type: ignore
        axiskeys=axiskeys,
        compression=N5Compressor.from_json_data(
            raw_attributes["compression"]),
        # NOTE(review): `or` would discard a falsy override — verify that
        # Point5D instances are always truthy (e.g. Point5D.zero()).
        location=location_override or location_5d,
    )
def from_json_value(cls, value: JsonValue) -> "Memory":
    """Parse a memory amount string such as '16G' into a Memory object.

    Raises:
        ValueError: if the string is not '<digits>G' (case-insensitive).
    """
    value_str = ensureJsonString(value)
    # Bug fix: the original pattern was r"(?<amount>\d+)(?<unit>[G]))" —
    # (?<name>...) is .NET/PCRE named-group syntax and the trailing ')' is
    # unbalanced, so re.compile raised re.error on every call. Python
    # requires (?P<name>...).
    match = re.compile(r"(?P<amount>\d+)(?P<unit>[G])",
                       re.IGNORECASE).fullmatch(value_str)
    if not match:
        raise ValueError(f"Bad memory value: {value}")
    return Memory(amount=int(match.group("amount")),
                  unit=cast(Literal["G"], match.group("unit")))
def from_json_value(cls, value: JsonValue) -> "N5DataSource":
    """Deserialize an N5DataSource from JSON.

    NOTE(review): a missing 'location' is passed through as None rather
    than Point5D.zero() as sibling deserializers do — presumably the
    constructor applies its own default; confirm that is intended.
    """
    obj = ensureJsonObject(value)
    raw_location = obj.get("location")
    location = None if raw_location is None else Point5D.from_json_value(raw_location)
    return N5DataSource(
        path=Path(ensureJsonString(obj.get("path"))),
        filesystem=JsonableFilesystem.from_json_value(obj.get("filesystem")),
        location=location,
    )
def __setstate__(self, value_obj: JsonObject):
    """Restore pickled state by re-running __init__ on the saved fields."""
    location = (
        ensureOptional(Point5D.from_json_value, value_obj.get("location"))
        or Point5D.zero()
    )
    tile_shape = ensureOptional(Shape5D.from_json_value, value_obj.get("tile_shape"))
    resolution = ensureOptional(ensureJsonIntTripplet, value_obj.get("spatial_resolution"))
    self.__init__(
        path=PurePosixPath(ensureJsonString(value_obj.get("path"))),
        location=location,
        filesystem=JsonableFilesystem.from_json_value(value_obj.get("filesystem")),
        tile_shape=tile_shape,
        spatial_resolution=resolution,
    )
def get_axis_2d(data: JsonValue) -> Optional[str]:
    """Extract the optional 'axis_2d' field: a single axis label or None.

    Raises:
        ValueError: if present but not a single character in Point5D.LABELS.
    """
    raw = ensureJsonObject(data).get("axis_2d")
    if raw is None:
        return None
    axis_2d = ensureJsonString(raw)
    if len(axis_2d) != 1 or axis_2d not in Point5D.LABELS:
        raise ValueError(f"Bad value for axis_2d in {data}")
    return axis_2d
def from_json_value(cls, value: JsonValue) -> "DataSink":
    """Deserialize a DataSink by dispatching on the '__class__' marker.

    Raises:
        ValueError: if the marker names no known DataSink subclass.
    """
    obj = ensureJsonObject(value)
    class_name = ensureJsonString(obj.get("__class__"))
    # Local import — presumably to avoid a circular import at module load.
    from webilastik.datasink.precomputed_chunks_sink import PrecomputedChunksScaleSink
    if class_name == PrecomputedChunksScaleSink.__name__:
        return PrecomputedChunksScaleSink.from_json_value(value)
    raise ValueError(
        f"Could not deserialize DataSink from {json.dumps(value)}")
def from_json_value(cls, value: JsonValue) -> "JsonableFilesystem":
    """Deserialize the filesystem from JSON.

    Raises:
        ValueError: if 'read_url' (or a present 'write_url') fails to parse.
            Previously Url.parse's None result was silently passed into the
            constructor; now it fails fast, consistent with HttpFs.
    """
    value_obj = ensureJsonObject(value)
    raw_headers = value_obj.get("headers")
    if raw_headers is None:
        headers = {}
    else:
        headers_obj = ensureJsonObject(raw_headers)
        headers = {
            ensureJsonString(k): ensureJsonString(v)
            for k, v in headers_obj.items()
        }

    read_url = Url.parse(ensureJsonString(value_obj.get("read_url")))
    if read_url is None:
        raise ValueError(
            f"Bad 'read_url' in json payload: {json.dumps(value, indent=4)}")

    raw_write_url = value_obj.get("write_url")
    write_url = None
    if raw_write_url is not None:
        write_url = Url.parse(ensureJsonString(raw_write_url))
        if write_url is None:
            raise ValueError(
                f"Bad 'write_url' in json payload: {json.dumps(value, indent=4)}")

    return cls(read_url=read_url, write_url=write_url, headers=headers)
def from_json_value(cls, value: JsonValue) -> "SkimageDataSource":
    """Deserialize a SkimageDataSource; a missing 'location' becomes the origin."""
    obj = ensureJsonObject(value)
    location = (
        ensureOptional(Point5D.from_json_value, obj.get("location"))
        or Point5D.zero()
    )
    tile_shape = ensureOptional(Shape5D.from_json_value, obj.get("tile_shape"))
    resolution = ensureOptional(ensureJsonIntTripplet, obj.get("spatial_resolution"))
    return SkimageDataSource(
        path=PurePosixPath(ensureJsonString(obj.get("path"))),
        location=location,
        filesystem=JsonableFilesystem.from_json_value(obj.get("filesystem")),
        tile_shape=tile_shape,
        spatial_resolution=resolution,
    )
def from_json_value(cls, value: JsonValue) -> "JobResources":
    """Deserialize the resource requirements of a job."""
    obj = ensureJsonObject(value)
    memory = Memory.from_json_value(obj.get("Memory"))
    runtime = Seconds(ensureJsonInt(obj.get("Runtime")))
    return JobResources(
        Memory=memory,
        Runtime=runtime,
        CPUs=ensureJsonInt(obj.get("CPUs")),
        Nodes=ensureJsonInt(obj.get("Nodes")),
        CPUsPerNode=ensureJsonInt(obj.get("CPUsPerNode")),
        Reservation=ensureJsonString(obj.get("Reservation")),
    )
def _get_tmp_url(self, path: str) -> Url:
    """Resolve an object path to its temporary direct-download URL.

    Requests the object with redirect=false so the API answers with a JSON
    body carrying the target url instead of issuing an HTTP redirect.

    Raises:
        requests.HTTPError: if the API call fails.
    """
    search = {**self.url.search, "redirect": "false"}
    object_url = self.url.concatpath(path).updated_with(search=search)
    response = self.session.get(object_url.raw)
    response.raise_for_status()
    payload = ensureJsonObject(response.json())
    cscs_url = Url.parse(ensureJsonString(payload.get("url")))
    assert cscs_url is not None
    return cscs_url
def close_callback(f: RemoteFile):
    # Flush the buffered file contents to the bucket when the handle closes.
    # `mode`, `path` and `self` come from the enclosing scope (closure).
    if mode == "r":
        # Read-only handle: nothing to upload.
        return
    _ = f.seek(0)
    payload = f.read()
    url = self.url.concatpath(path).raw
    # First PUT to the bucket API returns a JSON object whose "url" field
    # is the actual upload endpoint (presumably a temporary/presigned CSCS
    # URL — TODO confirm against the bucket API docs).
    response = self.session.put(url)
    response.raise_for_status()
    response_obj = ensureJsonObject(response.json())
    url = ensureJsonString(response_obj.get("url"))
    # Second PUT uploads the buffered payload to that endpoint.
    response = self.cscs_session.put(url, data=payload)
    response.raise_for_status()
def from_json_value(cls, value: JsonValue) -> "JobDescription":
    """Deserialize a full JobDescription payload."""
    obj = ensureJsonObject(value)
    arguments = tuple(
        ensureJsonString(arg)
        for arg in ensureJsonArray(obj.get("Arguments")))
    environment = {
        key: ensureJsonString(val)
        for key, val in ensureJsonObject(obj.get("Environment")).items()
    }
    imports = tuple(
        JobImport.from_json_value(v)
        for v in ensureJsonArray(obj.get("Imports")))
    return JobDescription(
        Name=ensureJsonString(obj.get("Name")),
        Project=ensureJsonString(obj.get("Project")),
        Executable=ensureJsonString(obj.get("Executable")),
        Arguments=arguments,
        Resources=JobResources.from_json_value(obj.get("Resources")),
        Environment=environment,
        Exports=ensureJsonStringArray(obj.get("Exports")),
        Imports=imports,
        Tags=ensureJsonStringArray(obj.get("Tags")),
    )