def from_json_value(cls, data: JsonValue) -> "Annotation":
    """Deserialize an Annotation from JSON: voxels, color, raw_data.

    Builds a boolean scribbling canvas covering the bounding box of the
    voxels and paints each voxel into it.
    """
    obj = ensureJsonObject(data)
    points: Sequence[Point5D] = [
        Point5D.from_json_value(raw_point)
        for raw_point in ensureJsonArray(obj.get("voxels"))
    ]
    color = Color.from_json_data(obj.get("color"))
    raw_data = DataSource.from_json_value(obj.get("raw_data"))

    start = Point5D.min_coords(points)
    # +1 because slice.stop is exclusive, while max_coords is inclusive
    stop = Point5D.max_coords(points) + 1
    scribbling_roi = Interval5D.create_from_start_stop(start=start, stop=stop)
    if scribbling_roi.shape.c != 1:
        raise ValueError(f"Annotations must not span multiple channels: {points}")

    canvas = Array5D.allocate(scribbling_roi, dtype=np.dtype(bool), value=False)
    for point in points:
        canvas.paint_point(point=point, value=True)
    return cls(canvas._data, axiskeys=canvas.axiskeys, color=color, raw_data=raw_data, location=start)
def _set_json_state(self, state: JsonValue):
    """Replace the feature-extractor selection from a JSON array, auto-confirming any prompt."""
    extractors = [
        self._item_from_json_data(raw_item)
        for raw_item in ensureJsonArray(state)
    ]
    return self.feature_extractors.set_value(extractors, confirmer=lambda msg: True)
def from_json_value(cls, data: JsonValue) -> "Annotation":
    """Build an Annotation from JSON by parsing voxels and raw_data, then delegating to from_voxels."""
    obj = ensureJsonObject(data)
    points: Sequence[Point5D] = [
        Point5D.from_json_value(v) for v in ensureJsonArray(obj.get("voxels"))
    ]
    return cls.from_voxels(
        voxels=points,
        raw_data=DataSource.from_json_value(obj.get("raw_data")),
    )
def from_json_value(cls, data: JsonValue):
    """Parse a PrecomputedChunksInfo from its JSON 'info' representation."""
    obj = ensureJsonObject(data)
    scales = tuple(
        PrecomputedChunksScale.from_json_value(raw_scale)
        for raw_scale in ensureJsonArray(obj.get("scales"))
    )
    return PrecomputedChunksInfo(
        type_=ensureJsonString(obj.get("type")),
        data_type=np.dtype(ensureJsonString(obj.get("data_type"))),  # type: ignore
        num_channels=ensureJsonInt(obj.get("num_channels")),
        scales=scales,
    )
def from_json_value(cls, value: JsonValue) -> "JobDescription":
    """Deserialize a JobDescription from a JSON object."""
    obj = ensureJsonObject(value)
    arguments = tuple(
        ensureJsonString(arg) for arg in ensureJsonArray(obj.get("Arguments"))
    )
    environment = {
        key: ensureJsonString(val)
        for key, val in ensureJsonObject(obj.get("Environment")).items()
    }
    imports = tuple(
        JobImport.from_json_value(v) for v in ensureJsonArray(obj.get("Imports"))
    )
    return JobDescription(
        Name=ensureJsonString(obj.get("Name")),
        Project=ensureJsonString(obj.get("Project")),
        Executable=ensureJsonString(obj.get("Executable")),
        Arguments=arguments,
        Resources=JobResources.from_json_value(obj.get("Resources")),
        Environment=environment,
        Exports=ensureJsonStringArray(obj.get("Exports")),
        Imports=imports,
        Tags=ensureJsonStringArray(obj.get("Tags")),
    )
def from_json_value(cls, data: JsonValue):
    """Parse a PrecomputedChunksInfo, requiring the 'type' marker to be 'image'.

    Raises:
        ValueError: if the JSON 'type' field is anything other than "image".
    """
    obj = ensureJsonObject(data)
    info_type = ensureJsonString(obj.get("type"))
    if info_type != "image":
        raise ValueError(f"Bad 'type' marker value: {info_type}")
    scales = tuple(
        PrecomputedChunksScale.from_json_value(raw_scale)
        for raw_scale in ensureJsonArray(obj.get("scales"))
    )
    return PrecomputedChunksInfo(
        type_=info_type,
        data_type=np.dtype(ensureJsonString(obj.get("data_type"))),
        num_channels=ensureJsonInt(obj.get("num_channels")),
        scales=scales,
    )
def from_json_value(cls, value: JsonValue) -> "PrecomputedChunksScale":
    """Deserialize one scale entry of a precomputed-chunks 'info' file."""
    obj = ensureJsonObject(value)
    chunk_sizes = tuple(
        ensureJsonIntTripplet(cs) for cs in ensureJsonArray(obj.get("chunk_sizes"))
    )
    return PrecomputedChunksScale(
        key=Path(ensureJsonString(obj.get("key"))),
        size=ensureJsonIntTripplet(obj.get("size")),
        resolution=ensureJsonIntTripplet(obj.get("resolution")),
        voxel_offset=ensureJsonIntTripplet(obj.get("voxel_offset")),
        chunk_sizes=chunk_sizes,
        encoding=PrecomputedChunksEncoder.from_json_value(obj.get("encoding")),
    )
def wait_until_jobs_completed(workflow: PixelClassificationWorkflow, timeout: float = 50):
    """Block until every export job in *workflow* reports all its steps completed.

    Polls the export applet's JSON state every 0.5 seconds, decrementing the
    remaining *timeout* budget on each wait.

    Args:
        workflow: the workflow whose export jobs are awaited.
        timeout: maximum number of seconds to keep polling.

    Raises:
        TimeoutError: if some job is still incomplete when the budget runs out.
    """
    wait_time = 0.5
    while timeout > 0:
        export_status: JsonObject = workflow.export_applet._get_json_state()
        jobs = ensureJsonArray(export_status["jobs"])
        for job in jobs:
            job_obj = ensureJsonObject(job)
            num_args = ensureJsonInt(job_obj["num_args"])
            num_completed_steps = ensureJsonInt(job_obj["num_completed_steps"])
            if num_completed_steps < num_args:
                # fix: plain string, not an f-string with no placeholders (F541)
                print("Jobs not done yet. Waiting...")
                time.sleep(wait_time)
                timeout -= wait_time
                break
        else:
            # Every job reported num_completed_steps >= num_args: all done.
            return
    raise TimeoutError("Waiting on jobs timed out!")
def run_rpc(self, *, user_prompt: UserPrompt, method_name: str, arguments: JsonObject) -> Optional[UsageError]:
    """Dispatch a feature-extractor RPC by name.

    Args:
        user_prompt: forwarded to the applet methods that may prompt the user.
        method_name: either "add_feature_extractors" or "remove_feature_extractors".
        arguments: JSON object expected to carry a "feature_extractors" array.

    Returns:
        A UsageError if the underlying applet call failed, else None.

    Raises:
        ValueError: if method_name is not one of the supported methods.
    """
    # Fix: validate method_name BEFORE parsing arguments. Previously an
    # unknown method raised a parse error from ensureJsonArray (when
    # "feature_extractors" was absent) instead of the intended ValueError.
    if method_name not in ("add_feature_extractors", "remove_feature_extractors"):
        raise ValueError(f"Invalid method name: '{method_name}'")
    feature_extractors = [
        IlpFilter.from_json_value(raw_feature)
        for raw_feature in ensureJsonArray(arguments.get("feature_extractors"))
    ]
    if method_name == "add_feature_extractors":
        return UsageError.check(
            self.add_feature_extractors(
                user_prompt=user_prompt, feature_extractors=feature_extractors))
    return UsageError.check(
        self.remove_feature_extractors(user_prompt, feature_extractors))
def run_rpc(self, *, user_prompt: UserPrompt, method_name: str, arguments: JsonObject) -> "UsageError | None":
    """Dispatch an export RPC by name.

    Raises:
        ValueError: if method_name is not a recognized export method.
    """
    if method_name == "start_export_job":
        return self.start_export_job(
            datasource=DataSource.from_json_value(arguments.get("datasource")),
            datasink=DataSink.from_json_value(arguments.get("datasink")),
        )
    if method_name == "start_simple_segmentation_export_job":
        sinks = [
            DataSink.from_json_value(raw_sink)
            for raw_sink in ensureJsonArray(arguments.get("datasinks"))
        ]
        return self.start_simple_segmentation_export_job(
            datasource=DataSource.from_json_value(arguments.get("datasource")),
            datasinks=sinks,
        )
    raise ValueError(f"Invalid method name: '{method_name}'")
def _list_objects(
        self, *, prefix: str,
        limit: Optional[int] = None) -> List[Union[BucketObject, BucketSubdir]]:
    """List bucket entries under *prefix* (non-recursive via '/' delimiter).

    Returns a mix of BucketObject and BucketSubdir items, as reported by
    the bucket listing endpoint.
    """
    listing_url = self.bucket_url.updated_with(extra_search={
        "delimiter": "/",
        "prefix": prefix.lstrip("/"),
        "limit": str(limit or 50),  # server-side page size; defaults to 50
    })
    response = self.session.get(listing_url.raw)
    response.raise_for_status()
    payload = ensureJsonObject(response.json())
    entries: List[Union[BucketObject, BucketSubdir]] = []
    for raw_entry in ensureJsonArray(payload.get("objects")):
        # Entries carrying a "subdir" key are directory placeholders.
        if "subdir" in ensureJsonObject(raw_entry):
            entries.append(BucketSubdir.from_json_value(raw_entry))
        else:
            entries.append(BucketObject.from_json_value(raw_entry))
    return entries
def _set_json_state(self, state: JsonValue):
    """Replace the annotation list from a JSON array, auto-confirming any prompt."""
    new_annotations = [
        Annotation.from_json_value(raw) for raw in ensureJsonArray(state)
    ]
    self.annotations.set_value(new_annotations, confirmer=lambda msg: True)
def from_json_value(cls, value: JsonValue) -> "OidcClient":
    """Deserialize an OidcClient from a Keycloak-style client representation JSON object."""
    obj = ensureJsonObject(value)

    raw_root_url = ensureJsonString(obj.get("rootUrl"))
    root_url = Url.parse(raw_root_url)
    if root_url is None:
        # it's possible to register a rootUrl without a path -.-
        root_url = Url.parse(raw_root_url + "/")
    assert root_url is not None

    redirect_uris: List[Url] = []
    for raw_uri in ensureJsonStringArray(obj.get("redirectUris")):
        try:
            parsed_uri = Url.parse(raw_uri)
            assert parsed_uri is not None
            redirect_uris.append(parsed_uri)
        except ValueError:
            # Relative redirect URIs are resolved against the root URL.
            # FIXME: do leading slashes mean root here too?
            redirect_uris.append(root_url.joinpath(PurePosixPath(raw_uri)))

    base_url = Url.parse(ensureJsonString(obj.get("baseUrl")))
    assert base_url is not None

    return OidcClient(
        alwaysDisplayInConsole=ensureJsonBoolean(obj.get("alwaysDisplayInConsole")),
        baseUrl=base_url,
        bearerOnly=ensureJsonBoolean(obj.get("bearerOnly")),
        clientAuthenticatorType=ensureJsonString(obj.get("clientAuthenticatorType")),
        clientId=ensureJsonString(obj.get("clientId")),
        consentRequired=ensureJsonBoolean(obj.get("consentRequired")),
        defaultClientScopes=ensureJsonStringArray(obj.get("defaultClientScopes")),
        description=ensureJsonString(obj.get("description")),
        directAccessGrantsEnabled=ensureJsonBoolean(obj.get("directAccessGrantsEnabled")),
        enabled=ensureJsonBoolean(obj.get("enabled")),
        frontchannelLogout=ensureJsonBoolean(obj.get("frontchannelLogout")),
        fullScopeAllowed=ensureJsonBoolean(obj.get("fullScopeAllowed")),
        id=ensureJsonString(obj.get("id")),
        implicitFlowEnabled=ensureJsonBoolean(obj.get("implicitFlowEnabled")),
        name=ensureJsonString(obj.get("name")),
        nodeReRegistrationTimeout=ensureJsonInt(obj.get("nodeReRegistrationTimeout")),
        notBefore=ensureJsonInt(obj.get("notBefore")),
        optionalClientScopes=set(
            Scope.from_json_value(s)
            for s in ensureJsonArray(obj.get("optionalClientScopes"))
        ),
        protocol=ensureJsonString(obj.get("protocol")),
        publicClient=ensureJsonBoolean(obj.get("publicClient")),
        redirectUris=tuple(redirect_uris),
        registrationAccessToken=ensureJsonString(obj.get("registrationAccessToken")),
        rootUrl=root_url,
        secret=ensureJsonString(obj.get("secret")),
        serviceAccountsEnabled=ensureJsonBoolean(obj.get("serviceAccountsEnabled")),
        standardFlowEnabled=ensureJsonBoolean(obj.get("standardFlowEnabled")),
        surrogateAuthRequired=ensureJsonBoolean(obj.get("surrogateAuthRequired")),
        webOrigins=ensureJsonStringArray(obj.get("webOrigins")),
    )