def run_rpc(self, *, user_prompt: UserPrompt, method_name: str, arguments: JsonObject) -> Optional[UsageError]:
    """Dispatch an RPC call by name.

    Returns a UsageError when the underlying operation reports a
    CascadeError, None on success, and raises ValueError for an
    unrecognized method name.
    """
    if method_name != "set_live_update":
        raise ValueError(f"Invalid method name: '{method_name}'")
    # Coerce the JSON argument to a strict boolean before forwarding.
    outcome = self.set_live_update(
        user_prompt=user_prompt,
        live_update=ensureJsonBoolean(arguments.get("live_update")),
    )
    if isinstance(outcome, CascadeError):
        return UsageError(outcome.message)
    return None
def run_rpc(self, *, user_prompt: UserPrompt, method_name: str, arguments: JsonObject) -> Optional[UsageError]:
    """Dispatch a feature-extractor RPC call by name.

    Returns a UsageError on a user-level failure, None on success, and
    raises ValueError for an unrecognized method name.
    """
    if method_name not in ("add_feature_extractors", "remove_feature_extractors"):
        raise ValueError(f"Invalid method name: '{method_name}'")
    # Parse the arguments only after validating the method name, so that an
    # unknown method reports the ValueError above instead of a confusing
    # JSON-parsing error about 'feature_extractors'.
    raw_feature_array = ensureJsonArray(arguments.get("feature_extractors"))
    feature_extractors = [IlpFilter.from_json_value(raw_feature) for raw_feature in raw_feature_array]
    if method_name == "add_feature_extractors":
        return UsageError.check(self.add_feature_extractors(
            user_prompt=user_prompt, feature_extractors=feature_extractors))
    return UsageError.check(self.remove_feature_extractors(user_prompt, feature_extractors))
def start_simple_segmentation_export_job(
    self, *, datasource: DataSource, datasinks: Sequence[DataSink]
) -> "UsageError | None":
    """Validate inputs and schedule a two-stage simple-segmentation export.

    A first job creates the datasinks; on its success, a callback swaps it
    for the job that streams each datasource tile through the classifier
    into the created sink writers.

    Returns a UsageError describing any failed precondition, or None once
    the sink-creation job has been queued.
    """
    classifier = self._in_operator()
    if classifier is None:
        return UsageError("Upstream not ready yet")
    # This export flavor requires exactly one sink per predicted class.
    if len(datasinks) != classifier.num_classes:
        return UsageError(
            f"Wrong number of datasinks. Expected {classifier.num_classes} but got {len(datasinks)}"
        )
    # Each sink must match the datasource's shape with 3 channels (uint8).
    expected_shape = datasource.shape.updated(c=3)
    if any(sink.shape != expected_shape for sink in datasinks):
        return UsageError("All data sinks should have 3 channels for this kind of export")
    if any(sink.dtype != np.dtype("uint8") for sink in datasinks):
        return UsageError("All data sinks should have dtype of uint8 for this kind of export")

    def launch_export_job(job_id: uuid.UUID, result: "Exception | Sequence[DataSinkWriter]"):
        # Runs when the sink-creation job finishes: remove that job and
        # queue the actual export job against the created sink writers.
        if isinstance(result, Exception):
            raise result
        self._remove_job(job_id)
        _ = self._create_job(
            name="Simple Segmentation Export Job",  # was an f-string with no placeholders
            target=ExportAsSimpleSegmentationTask(operator=classifier, sink_writers=result),
            args=datasource.roi.get_datasource_tiles(),  # FIXME: use sink tile_size
            num_args=datasource.roi.get_num_tiles(tile_shape=datasource.tile_shape),
        )

    _ = self._create_job(
        name="Creating datasinks",  # was an f-string with no placeholders
        target=_create_datasinks,
        args=[datasinks],
        num_args=1,  # FIXME: maybe one per datasink?
        on_success=launch_export_job,
    )
    return None
def start_export_job(self, *, datasource: DataSource, datasink: DataSink) -> "UsageError | None":
    """Validate inputs and schedule a two-stage float32 prediction export.

    A first job creates the datasink; on its success, a callback swaps it
    for the job that streams each datasource tile through the classifier
    into the created sink writer.

    Returns a UsageError describing any failed precondition, or None once
    the sink-creation job has been queued.
    """
    classifier = self._in_operator()
    if classifier is None:
        return UsageError("Upstream not ready yet")
    # The sink must hold one channel per predicted class, as float32.
    expected_shape = datasource.shape.updated(c=classifier.num_classes)
    if datasink.shape != expected_shape:
        return UsageError(f"Bad sink shape. Expected {expected_shape} but got {datasink.shape}")
    if datasink.dtype != np.dtype("float32"):
        return UsageError("Data sink should have dtype of float32 for this kind of export")

    def launch_export_job(job_id: uuid.UUID, result: "Exception | DataSinkWriter"):
        # Runs when the sink-creation job finishes: remove that job and
        # queue the actual export job against the created sink writer.
        # NOTE(review): checks BaseException though the annotation says
        # Exception (original author flagged this with FIXME) — confirm intent.
        if isinstance(result, BaseException):
            raise result
        self._remove_job(job_id)
        _ = self._create_job(
            name="Export Job",  # was an f-string with no placeholders
            target=ExportTask(operator=classifier, sink_writer=result),
            args=datasource.roi.get_datasource_tiles(),  # FIXME: use sink tile_size
            num_args=datasource.roi.get_num_tiles(tile_shape=datasource.tile_shape),
        )

    _ = self._create_job(
        name="Creating datasink",  # was an f-string with no placeholders
        target=_create_datasink,
        args=[datasink],
        num_args=1,
        on_success=launch_export_job,
        # on_failure=lambda exception: self._remove_job(sink_creation_job.uuid)
    )
    return None
def run_rpc(self, *, user_prompt: UserPrompt, method_name: str, arguments: JsonObject) -> "UsageError | None":
    """Dispatch a label/annotation RPC call by name.

    Returns a UsageError on a user-level failure, None on success, and
    raises ValueError for an unrecognized method name.
    """

    def str_arg(key: str) -> str:
        # Pull a mandatory string argument out of the JSON payload.
        return ensureJsonString(arguments.get(key))

    if method_name == "recolor_label":
        return UsageError.check(self.recolor_label(
            user_prompt,
            label_name=str_arg("label_name"),
            new_color=Color.from_json_data(arguments.get("new_color")),
        ))
    if method_name == "rename_label":
        return UsageError.check(self.rename_label(
            user_prompt,
            old_name=str_arg("old_name"),
            new_name=str_arg("new_name"),
        ))
    if method_name == "create_label":
        return UsageError.check(self.create_label(
            user_prompt=user_prompt,
            label_name=str_arg("label_name"),
            color=Color.from_json_data(arguments.get("color")),
        ))
    if method_name == "remove_label":
        return UsageError.check(self.remove_label(
            user_prompt=user_prompt,
            label_name=str_arg("label_name"),
        ))
    if method_name == "add_annotation":
        return UsageError.check(self.add_annotation(
            user_prompt,
            label_name=str_arg("label_name"),
            annotation=Annotation.from_json_value(arguments.get("annotation")),
        ))
    if method_name == "remove_annotation":
        return UsageError.check(self.remove_annotation(
            user_prompt,
            label_name=str_arg("label_name"),
            annotation=Annotation.from_json_value(arguments.get("annotation")),
        ))
    raise ValueError(f"Invalid method name: '{method_name}'")
def try_from_url(cls, url: Url) -> "HttpFs | UsageError":
    """Build an HttpFs reading from *url*, or return a UsageError for non-HTTP(S) protocols."""
    if url.protocol in (Protocol.HTTP, Protocol.HTTPS):
        return HttpFs(read_url=url)
    return UsageError(f"Bad url for HttpFs: {url}")
def parse_url(url: str) -> Union[Url, UsageError]:
    """Parse *url* into a Url, returning a UsageError when it cannot be parsed."""
    parsed = Url.parse(url)
    return UsageError(f"Bad url: {url}") if parsed is None else parsed
def from_environment(cls) -> "UserToken | UsageError":
    """Build a UserToken from the configured environment variable, or a UsageError if unset."""
    token = os.environ.get(cls.ENV_VAR_NAME)
    if token is None:
        return UsageError(f"Environment variable '{cls.ENV_VAR_NAME}' is not set")
    return UserToken(access_token=token)