def from_json_value(cls, value: JsonValue) -> "RPCPayload": value_obj = ensureJsonObject(value) return RPCPayload( applet_name=ensureJsonString(value_obj.get("applet_name")), method_name=ensureJsonString(value_obj.get("method_name")), arguments=ensureJsonObject(value_obj.get("arguments")), )
def from_json_value(cls, value: JsonValue) -> "HttpFs": value_obj = ensureJsonObject(value) raw_headers = value_obj.get("headers") if raw_headers is None: headers = {} else: headers_obj = ensureJsonObject(raw_headers) headers = { ensureJsonString(k): ensureJsonString(v) for k, v in headers_obj.items() } read_url = Url.parse(ensureJsonString(value_obj.get("read_url"))) if read_url is None: raise ValueError( f"Bad 'read_url' in json payload: {json.dumps(value, indent=4)}" ) raw_write_url = value_obj.get("write_url") if raw_write_url is None: write_url = None else: write_url = Url.parse(ensureJsonString(raw_write_url)) if write_url is None: raise ValueError( f"Bad write_url in HttpFs payload: {json.dumps(value, indent=4)}" ) return cls( read_url=read_url, write_url=write_url, headers=headers, )
def from_json_value(cls, data: JsonValue) -> "Annotation": data_dict = ensureJsonObject(data) raw_voxels = ensureJsonArray(data_dict.get("voxels")) voxels: Sequence[Point5D] = [ Point5D.from_json_value(raw_voxel) for raw_voxel in raw_voxels ] color = Color.from_json_data(data_dict.get("color")) raw_data = DataSource.from_json_value(data_dict.get("raw_data")) start = Point5D.min_coords(voxels) stop = Point5D.max_coords( voxels ) + 1 # +1 because slice.stop is exclusive, but max_point isinclusive scribbling_roi = Interval5D.create_from_start_stop(start=start, stop=stop) if scribbling_roi.shape.c != 1: raise ValueError( f"Annotations must not span multiple channels: {voxels}") scribblings = Array5D.allocate(scribbling_roi, dtype=np.dtype(bool), value=False) for voxel in voxels: scribblings.paint_point(point=voxel, value=True) return cls(scribblings._data, axiskeys=scribblings.axiskeys, color=color, raw_data=raw_data, location=start)
def get_user_token(self, *, code: str, redirect_uri: Url) -> "UserToken":
    if not self.can_redirect_to(redirect_uri):
        raise ValueError(f"Can't redirect to {redirect_uri.raw}")
    data = {
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": redirect_uri.raw,
        "client_id": self.clientId,
        "client_secret": self.secret,
    }
    print(f"Posting this:\n{json.dumps(data)}")  # NOTE: debug print; this logs the client secret
    resp = requests.post(
        "https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token",
        allow_redirects=False,
        data=data,
    )
    print(f"Got this response:\n\n{resp.text}")
    resp.raise_for_status()
    payload = ensureJsonObject(resp.json())
    return UserToken(
        access_token=ensureJsonString(payload.get("access_token")),
        refresh_token=ensureJsonString(payload.get("refresh_token")),
        # expires_in=payload["expires_in"],
        # refresh_expires_in=payload["refresh_expires_in"],
        # token_type=payload["token_type"],
        # id_token=payload["id_token"],
        # not_before_policy=payload["not-before-policy"],
        # session_state=payload["session_state"],
        # scope=payload["scope"],
    )

def __setstate__(self, data: JsonValue):
    data_obj = ensureJsonObject(data)
    self.__init__(
        path=Path(ensureJsonString(data_obj.get("path"))),
        location=Interval5D.from_json_value(data_obj.get("interval")).start,
        filesystem=JsonableFilesystem.from_json_value(data_obj.get("filesystem")),
    )

def from_json_value(cls, value: JsonValue) -> "UserInfo": value_obj = ensureJsonObject(value) return UserInfo( sub=uuid.UUID(ensureJsonString(value_obj["sub"])), preferred_username=ensureJsonString( value_obj["preferred_username"]), )
@classmethod
def get(cls, username: str, password: str) -> "DeveloperToken":
    response = requests.post(
        "https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token",
        auth=("developer", ""),
        data={
            "username": username,
            "password": password,
            "grant_type": "password",
        },
    )
    print(f"Get DevToken response:\n{json.dumps(response.json(), indent=4)}")
    response.raise_for_status()
    payload = ensureJsonObject(response.json())
    return DeveloperToken(
        access_token=ensureJsonString(payload.get("access_token")),
        expires_in=ensureJsonInt(payload.get("expires_in")),
        not_before_policy=ensureJsonInt(payload.get("not-before-policy")),
        refresh_expires_in=ensureJsonInt(payload.get("refresh_expires_in")),
        refresh_token=ensureJsonString(payload.get("refresh_token")),
        scope=ensureJsonString(payload.get("scope")),
        token_type=ensureJsonString(payload.get("token_type")),
    )

def from_json_value(cls, value: JsonValue) -> "DataSource": json_obj = ensureJsonObject(value) datasource_name = ensureJsonString(json_obj.get("__class__")) for name, constructor in cls.datasource_from_json_constructors.items(): if name == datasource_name: return constructor(value) raise ValueError(f"Can't deserialize {json.dumps(value)}")
@classmethod
def from_json_data(cls, data: JsonValue, location_override: Optional[Point5D] = None) -> "N5DatasetAttributes":
    raw_attributes = ensureJsonObject(data)
    dimensions = ensureJsonIntArray(raw_attributes.get("dimensions"))
    blockSize = ensureJsonIntArray(raw_attributes.get("blockSize"))
    axes = raw_attributes.get("axes")
    if axes is None:
        axiskeys = guess_axiskeys(dimensions)
    else:
        # N5 lists axes fastest-varying first, so reverse them into "zyx"-style keys
        axiskeys = "".join(ensureJsonStringArray(axes)[::-1]).lower()
    location = raw_attributes.get("location")
    if location is None:
        location_5d = Point5D.zero()
    else:
        location_5d = Point5D.zero(**dict(zip(axiskeys, ensureJsonIntArray(location)[::-1])))

    return N5DatasetAttributes(
        blockSize=Shape5D.create(raw_shape=blockSize[::-1], axiskeys=axiskeys),
        dimensions=Shape5D.create(raw_shape=dimensions[::-1], axiskeys=axiskeys),
        dataType=np.dtype(ensureJsonString(raw_attributes.get("dataType"))).newbyteorder(">"),  # type: ignore
        axiskeys=axiskeys,
        compression=N5Compressor.from_json_data(raw_attributes["compression"]),
        location=location_override or location_5d,
    )

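# Illustrative attributes.json for the parser above (all values made up, and
# the "compression" payload is assumed to be the standard N5 form accepted by
# N5Compressor.from_json_data). "dimensions", "blockSize" and "axes" are
# given fastest-axis-first, as in N5, and get reversed by the code above:
example_n5_attributes = N5DatasetAttributes.from_json_data({
    "dimensions": [100, 200, 50],
    "blockSize": [64, 64, 32],
    "axes": ["x", "y", "z"],
    "dataType": "uint16",
    "compression": {"type": "gzip"},
})
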
def from_json_data(cls, data: JsonValue) -> "Color": data_dict = ensureJsonObject(data) return Color( r=np.uint8(ensureJsonInt(data_dict.get("r", 0))), g=np.uint8(ensureJsonInt(data_dict.get("g", 0))), b=np.uint8(ensureJsonInt(data_dict.get("b", 0))), )
def from_json_value(cls, data: JsonValue) -> "StructureTensorEigenvalues": data_dict = ensureJsonObject(data) return cls( innerScale=ensureJsonFloat(data_dict.get("innerScale")), outerScale=ensureJsonFloat(data_dict.get("outerScale")), window_size=ensureJsonFloat(data_dict.get("window_size", 0)), axis_2d=get_axis_2d(data_dict.get("axis_2d")), )
@classmethod
def from_json_value(cls: Type[SIGMA_FILTER], data: JsonValue) -> SIGMA_FILTER:
    data_dict = ensureJsonObject(data)
    return cls(
        sigma=ensureJsonFloat(data_dict.get("sigma")),
        window_size=ensureJsonFloat(data_dict.get("window_size", 0)),
        axis_2d=get_axis_2d(data_dict.get("axis_2d")),
    )

def from_json_value(cls, data: JsonValue) -> "DifferenceOfGaussians": data_dict = ensureJsonObject(data) return cls( sigma0=ensureJsonFloat(data_dict.get("sigma0")), sigma1=ensureJsonFloat(data_dict.get("sigma1")), window_size=ensureJsonFloat(data_dict.get("window_size", 0)), axis_2d=get_axis_2d(data_dict.get("axis_2d")), )
@classmethod
def from_json_value(cls: Type[ScaleFilter], data: JsonValue) -> ScaleFilter:
    data_dict = ensureJsonObject(data)
    return cls(
        scale=ensureJsonFloat(data_dict.get("scale")),
        window_size=ensureJsonFloat(data_dict.get("window_size", 0)),
        axis_2d=get_axis_2d(data_dict.get("axis_2d")),
    )

def from_json_value(cls, value: JsonValue) -> "N5DataSource": value_obj = ensureJsonObject(value) raw_location = value_obj.get("location") return N5DataSource( path=Path(ensureJsonString(value_obj.get("path"))), filesystem=JsonableFilesystem.from_json_value(value_obj.get("filesystem")), location=raw_location if raw_location is None else Point5D.from_json_value(raw_location), )
def get_axis_2d(data: JsonValue) -> Optional[str]:
    data_dict = ensureJsonObject(data)
    axis_2d = data_dict.get("axis_2d")
    if axis_2d is None:
        return None
    axis_2d = ensureJsonString(axis_2d)
    if len(axis_2d) != 1 or axis_2d not in Point5D.LABELS:
        raise ValueError(f"Bad value for axis_2d in {data}")
    return axis_2d

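# Illustrative usage of get_axis_2d as defined above (note it expects the
# whole JSON object and looks up the "axis_2d" key itself):
assert get_axis_2d({"axis_2d": "z"}) == "z"
assert get_axis_2d({}) is None
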
def from_json_value(cls, value: JsonValue) -> "DataSink": value_obj = ensureJsonObject(value) class_name = ensureJsonString(value_obj.get("__class__")) from webilastik.datasink.precomputed_chunks_sink import PrecomputedChunksScaleSink if class_name == PrecomputedChunksScaleSink.__name__: return PrecomputedChunksScaleSink.from_json_value(value) raise ValueError( f"Could not deserialize DataSink from {json.dumps(value)}")
def from_json_value(cls, value: JsonValue) -> "SkimageDataSource": value_obj = ensureJsonObject(value) return SkimageDataSource( path=PurePosixPath(ensureJsonString(value_obj.get("path"))), location=ensureOptional(Point5D.from_json_value, value_obj.get("location")) or Point5D.zero(), filesystem=JsonableFilesystem.from_json_value(value_obj.get("filesystem")), tile_shape=ensureOptional(Shape5D.from_json_value, value_obj.get("tile_shape")), spatial_resolution=ensureOptional(ensureJsonIntTripplet, value_obj.get("spatial_resolution")), )
def from_json_value(cls, value: JsonValue) -> "JsonableFilesystem": value_obj = ensureJsonObject(value) raw_write_url = value_obj.get("write_url") raw_headers = value_obj.get("headers") if raw_headers is None: headers = {} else: headers_obj = ensureJsonObject(raw_headers) headers = { ensureJsonString(k): ensureJsonString(v) for k, v in headers_obj.items() } return cls( read_url=Url.parse(ensureJsonString(value_obj.get("read_url"))), write_url=None if raw_write_url is None else Url.parse( ensureJsonString(raw_write_url)), headers=headers, )
def from_json_value(cls, value: JsonValue) -> "BucketObject": value_dict = ensureJsonObject(value) return BucketObject( hash_=ensureJsonString(value_dict.get("hash")), last_modified=datetime.fromisoformat( ensureJsonString(value_dict.get("last_modified"))), bytes_=ensureJsonInt(value_dict.get("bytes")), name=PurePosixPath(ensureJsonString(value_dict.get("name"))), content_type=ensureJsonString(value_dict.get("content_type")), )
def from_json_value(cls, value: JsonValue) -> "JobResources": value_obj = ensureJsonObject(value) return JobResources( Memory=Memory.from_json_value(value_obj.get("Memory")), Runtime=Seconds(ensureJsonInt(value_obj.get("Runtime"))), CPUs=ensureJsonInt(value_obj.get("CPUs")), Nodes=ensureJsonInt(value_obj.get("Nodes")), CPUsPerNode=ensureJsonInt(value_obj.get("CPUsPerNode")), Reservation=ensureJsonString(value_obj.get("Reservation")), )
def from_json_value(cls, data: JsonValue) -> "Annotation": data_dict = ensureJsonObject(data) raw_voxels = ensureJsonArray(data_dict.get("voxels")) voxels: Sequence[Point5D] = [ Point5D.from_json_value(raw_voxel) for raw_voxel in raw_voxels ] raw_data = DataSource.from_json_value(data_dict.get("raw_data")) return cls.from_voxels(voxels=voxels, raw_data=raw_data)
def from_json_value(cls, value: JsonValue) -> "PrecomputedChunksScaleSink": value_obj = ensureJsonObject(value) return PrecomputedChunksScaleSink( filesystem=JsonableFilesystem.from_json_value( value_obj.get("filesystem")), info_dir=PurePosixPath(ensureJsonString( value_obj.get("info_dir"))), scale=PrecomputedChunksScale.from_json_value( value_obj.get("scale")), dtype=np.dtype(ensureJsonString(value_obj.get("dtype"))), num_channels=ensureJsonInt(value_obj.get("num_channels")))
@classmethod
def from_json_value(cls, data: JsonValue) -> "PrecomputedChunksInfo":
    data_dict = ensureJsonObject(data)
    return PrecomputedChunksInfo(
        type_=ensureJsonString(data_dict.get("type")),
        data_type=np.dtype(ensureJsonString(data_dict.get("data_type"))),  # type: ignore
        num_channels=ensureJsonInt(data_dict.get("num_channels")),
        scales=tuple(
            PrecomputedChunksScale.from_json_value(raw_scale)
            for raw_scale in ensureJsonArray(data_dict.get("scales"))
        ),
    )

def from_json_value(cls, value: JsonValue) -> "PrecomputedChunksScaleSink": value_obj = ensureJsonObject(value) return PrecomputedChunksScaleSink( filesystem=JsonableFilesystem.from_json_value( value_obj.get("filesystem")), base_path=Path(ensureJsonString(value_obj.get("base_path"))), scale=PrecomputedChunksScale5D.from_json_value( value_obj.get("scale")), dtype=np.dtype(ensureJsonString( value_obj.get("dtype"))), #type: ignore )
def from_json_value(cls, value: JsonValue) -> "H5DataSource": value_obj = ensureJsonObject(value) raw_location = value_obj.get("location") return H5DataSource( outer_path=Path(ensureJsonString(value_obj.get("outer_path"))), inner_path=PurePosixPath( ensureJsonString(value_obj.get("inner_path"))), filesystem=JsonableFilesystem.from_json_value( value_obj.get("filesystem")), location=Point5D.zero() if raw_location is None else Point5D.from_json_value(raw_location), )
def from_json_value(cls, value: JsonValue) -> "SkimageDataSource": value_obj = ensureJsonObject(value) raw_location = value_obj.get("location") raw_tile_shape = value_obj.get("tile_shape") return SkimageDataSource( path=Path(ensureJsonString(value_obj.get("path"))), location=Point5D.zero() if raw_location is None else Point5D.from_json_value(raw_location), filesystem=JsonableFilesystem.from_json_value( value_obj.get("filesystem")), tile_shape=None if raw_tile_shape is None else Shape5D.from_json_value(raw_tile_shape))
def _get_tmp_url(self, path: str) -> Url:
    object_url = self.url.concatpath(path).updated_with(search={
        **self.url.search,
        "redirect": "false",
    })
    response = self.session.get(object_url.raw)
    response.raise_for_status()
    response_obj = ensureJsonObject(response.json())
    cscs_url = Url.parse(ensureJsonString(response_obj.get("url")))
    assert cscs_url is not None
    return cscs_url

def close_callback(f: RemoteFile):
    if mode == "r":
        return
    _ = f.seek(0)
    payload = f.read()
    # the PUT against the bucket endpoint returns a JSON body whose "url"
    # field is the actual upload target...
    url = self.url.concatpath(path).raw
    response = self.session.put(url)
    response.raise_for_status()
    response_obj = ensureJsonObject(response.json())
    url = ensureJsonString(response_obj.get("url"))
    # ...and the file contents are then uploaded to that URL
    response = self.cscs_session.put(url, data=payload)
    response.raise_for_status()

def from_json_value(cls, value: JsonValue) -> "JobDescription": value_obj = ensureJsonObject(value) return JobDescription( Name=ensureJsonString(value_obj.get("Name")), Project=ensureJsonString(value_obj.get("Project")), Executable=ensureJsonString(value_obj.get("Executable")), Arguments=tuple( ensureJsonString(arg) for arg in ensureJsonArray(value_obj.get("Arguments"))), Resources=JobResources.from_json_value(value_obj.get("Resources")), Environment={ k: ensureJsonString(v) for k, v in ensureJsonObject(value_obj.get( "Environment")).items() }, Exports=ensureJsonStringArray(value_obj.get("Exports")), Imports=tuple( JobImport.from_json_value(v) for v in ensureJsonArray(value_obj.get("Imports"))), Tags=ensureJsonStringArray(value_obj.get("Tags")), )