Example #1
    def compute_feature(self):
        with progress(total=3) as pbar:
            pbar.set_description("Calculating feature")
            # self.pbar.setValue(25)
            pbar.update(1)
            src_grp = None if self.cmb_source.currentIndex() == 0 else "features"

            src = DataModel.g.dataset_uri(self.cmb_source.value(), group=src_grp)
            logger.info(f"Setting src: {self.cmb_source.value()} ")

            dst = DataModel.g.dataset_uri(self.feature_id, group="features")
            # self.pbar.setValue(50)

            logger.info(f"Setting dst: {self.feature_id}")
            logger.info(f"widgets.items() {self.widgets.items()}")
            pbar.update(1)

            all_params = dict(src=src, dst=dst, modal=True)

            if self.feature_type == "wavelet":
                all_params["wavelet"] = str(self.wavelet_type.value())
                all_params["threshold"] = self.wavelet_threshold.value()

            all_params.update({k: v.value() for k, v in self.widgets.items()})

            logger.info(f"Computing features: {self.feature_type} {all_params}")
            # result = Launcher.g.run("features", self.feature_type, **all_params)
            Launcher.g.run("features", self.feature_type, **all_params)
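All of these examples follow the same pattern: wrap the work in a progress context manager, set a description, and call update() once per completed step. Below is a minimal, self-contained sketch of that pattern; the tqdm-backed progress helper defined here is an assumption standing in for whichever helper these examples actually import, included only so the sketch runs on its own.

from contextlib import contextmanager

from tqdm import tqdm

@contextmanager
def progress(total):
    # Hypothetical stand-in for the progress context manager used in these examples.
    pbar = tqdm(total=total)
    try:
        yield pbar
    finally:
        pbar.close()

def compute_something():
    with progress(total=3) as pbar:
        pbar.set_description("Step 1 of 3")
        pbar.update(1)  # one unit of work finished
        pbar.set_description("Step 2 of 3")
        pbar.update(1)
        pbar.set_description("Finishing")
        pbar.update(1)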
Example #2
    def calculate_analyzer(self):
        with progress(total=2) as pbar:
            pbar.set_description("Calculating pipeline")
            pbar.update(1)

            if self.analyzer_type == "label_splitter":
                self.calc_label_splitter()
            elif self.analyzer_type == "find_connected_components":
                self.calc_find_connected_components()    
            elif self.analyzer_type == "level_image_stats":
                self.calc_level_image_stats()
            elif self.analyzer_type == "binary_image_stats":
                self.calc_binary_image_stats()
            elif self.analyzer_type == "image_stats":
                self.calc_image_stats()
            elif self.analyzer_type == "object_stats2":
                self.calc_object_stats2()
            elif self.analyzer_type == "object_stats":
                self.calc_object_stats2()
            elif self.analyzer_type == "object_detection_stats":
                self.calc_object_detection_stats()
            elif self.analyzer_type == "detector_predict":
                self.calc_detector_predict()
            elif self.analyzer_type == "spatial_clustering":
                self.calc_spatial_clustering()
            elif self.analyzer_type == "remove_masked_objects":
                self.calc_removed_masked_objects()

            pbar.update(1)
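The elif chain above maps each analyzer_type string to a calc_* method. A more compact, equivalent formulation uses a dispatch dictionary; the sketch below is illustrative only and reuses the method names from the example above.

    def calculate_analyzer(self):
        with progress(total=2) as pbar:
            pbar.set_description("Calculating pipeline")
            pbar.update(1)
            # Dispatch table from analyzer_type to the bound method that computes it.
            dispatch = {
                "label_splitter": self.calc_label_splitter,
                "find_connected_components": self.calc_find_connected_components,
                "level_image_stats": self.calc_level_image_stats,
                "binary_image_stats": self.calc_binary_image_stats,
                "image_stats": self.calc_image_stats,
                "object_stats2": self.calc_object_stats2,
                "object_stats": self.calc_object_stats2,
                "object_detection_stats": self.calc_object_detection_stats,
                "detector_predict": self.calc_detector_predict,
                "spatial_clustering": self.calc_spatial_clustering,
                "remove_masked_objects": self.calc_removed_masked_objects,
            }
            calc = dispatch.get(self.analyzer_type)
            if calc is not None:
                calc()
            pbar.update(1)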
Example #3
 def view_regions(self):
     logger.debug(f"Transferring supervoxels {self.svid} to viewer")
     with progress(total=2) as pbar:
         pbar.set_description("Viewing feature")
         pbar.update(1)
         print(f"Current Supervoxels: {cfg.current_supervoxels}")
         cfg.ppw.clientEvent.emit(
             {"source": "superregions", "data": "view_regions", "region_id": self.svid}
         )
         pbar.update(1)
Example #4
 def view_level(self):
     with progress(total=2) as pbar:
         pbar.set_description("Viewing feature")
         pbar.update(1)
         cfg.ppw.clientEvent.emit({
             "source": "annotations",
             "data": "paint_annotations",
             "level_id": self.level_id,
         })
         pbar.update(1)
Example #5
 def view_feature(self):
     logger.debug(f"View feature_id {self.feature_id}")
     with progress(total=2) as pbar:
         pbar.set_description("Viewing feature")
         pbar.update(1)
         cfg.ppw.clientEvent.emit(
             {
                 "source": "features",
                 "data": "view_feature",
                 "feature_id": self.feature_id,
             }
         )
         pbar.update(1)    
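None of the view_* methods above touch the viewer directly; each one emits a plain dictionary on cfg.ppw.clientEvent describing what should be shown. The sketch below is a hypothetical receiver, shown only to illustrate the message shape; the handler name and printed output are assumptions.

def handle_client_event(msg):
    # Hypothetical handler illustrating the fields emitted by the view_* methods.
    source = msg.get("source")  # e.g. "features", "annotations", "superregions"
    data = msg.get("data")      # e.g. "view_feature", "paint_annotations", "view_regions"
    if source == "features" and data == "view_feature":
        print(f"Show feature {msg['feature_id']}")
    elif source == "annotations" and data == "paint_annotations":
        print(f"Paint annotation level {msg['level_id']}")
    elif source == "superregions" and data == "view_regions":
        print(f"Show supervoxels {msg['region_id']}")

handle_client_event({"source": "features", "data": "view_feature", "feature_id": "001_gblur"})  # hypothetical id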
Example #6
    def compute_supervoxels(self):

        with progress(total=4) as pbar:
            pbar.set_description("Refreshing")
            pbar.update(1)
                
            # src = [
            #    DataModel.g.dataset_uri("features/" + s) for s in [self.svsource.value()]
            # ]
            src = DataModel.g.dataset_uri(self.svsource.value(), group="features")
            dst = DataModel.g.dataset_uri(self.svid, group="superregions")
            logger.debug(f"Compute sv: Src {src} Dst {dst}")

            from survos2.model import Workspace

            ws = Workspace(DataModel.g.current_workspace)
            num_chunks = np.prod(np.array(ws.metadata()["chunk_grid"]))
            chunk_size = ws.metadata()["chunk_size"]
            logger.debug(
                f"Using chunk_size {chunk_size} to compute number of supervoxel segments for num_chunks: {num_chunks}."
            )

            with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
                src_dataset_shape = DM.sources[0][:].shape

            # n_segments = int(np.prod(chunk_size) // (self.svshape.value() ** 3))
            pbar.update(1)
            n_segments = int(np.prod(src_dataset_shape) / self.svshape.value() ** 3)

            if self.int64_checkbox.value():
                out_dtype = "uint64"
            else:
                out_dtype = "uint32"

            params = dict(
                src=src,
                dst=dst,
                compactness=round(self.svcompactness.value() / 100, 3),
                # shape=self.svshape.value(),
                n_segments=n_segments,
                spacing=self.svspacing.value(),
                modal=True,
                out_dtype=out_dtype,
            )
            logger.debug(f"Compute supervoxels with params {params}")

            pbar.update(1)

            result = Launcher.g.run("superregions", "supervoxels", **params)
            if result is not None:
                pbar.update(1)
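The n_segments value in compute_supervoxels is simply the number of voxels in the source dataset divided by the requested supervoxel volume (the cube of svshape). A quick worked example with a hypothetical 100 x 512 x 512 volume and a supervoxel side length of 10:

import numpy as np

src_dataset_shape = (100, 512, 512)  # hypothetical source volume shape
svshape = 10                         # hypothetical supervoxel side length

n_segments = int(np.prod(src_dataset_shape) / svshape ** 3)
print(n_segments)  # 26214 supervoxels of roughly 10 x 10 x 10 voxels each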
Example #7
 def transfer_layer(msg):
     with progress(total=1) as pbar:
         pbar.set_description("Transferring layer")
         logger.debug(f"transfer_layer {msg}")
         selected_layer = viewer.layers.selection.pop()
         if isinstance(selected_layer, Labels):
             _transfer_labels(selected_layer)
         elif isinstance(selected_layer, Points):
             _transfer_points(selected_layer)
         elif isinstance(selected_layer, Image):
             _transfer_features_http(selected_layer)
         else:
             logger.debug("Unsupported layer type.")
         pbar.update(1)
         processEvents({"data": "refresh"})
Example #8
    def setup(self):
        with progress(total=len(list_plugins())) as pbar:
            pbar.set_description("Refreshing viewer")
            pbar.update(1)

            for plugin_name in list_plugins():
                pbar.update(1)
                plugin = get_plugin(plugin_name)
                name = plugin["name"]
                title = plugin["title"]
                plugin_cls = plugin["cls"]  # full classname
                tab = plugin["tab"]
                self.pluginContainer.show_plugin(name, tab)

        for layer in list(cfg.viewer.layers):
            cfg.viewer.layers.remove(layer)
        cfg.emptying_viewer = False
Example #9
    def compute_pipeline(self):
        dst = DataModel.g.dataset_uri(self.pipeline_id, group="pipelines")

        with progress(total=3) as pbar:
            pbar.set_description("Calculating pipeline")
            pbar.update(1)
            try:
                if self.pipeline_type == "superregion_segment":
                    all_params = self.setup_params_superregion_segment(dst)
                elif self.pipeline_type == "rasterize_points":
                    all_params = self.setup_params_rasterize_points(dst)
                elif self.pipeline_type == "watershed":
                    all_params = self.setup_params_watershed(dst)
                elif self.pipeline_type == "predict_segmentation_fcn":
                    all_params = self.setup_params_predict_segmentation_fcn(
                        dst)
                elif self.pipeline_type == "label_postprocess":
                    all_params = self.setup_params_label_postprocess(dst)
                elif self.pipeline_type == "cleaning":
                    all_params = self.setup_params_cleaning(dst)
                elif self.pipeline_type == "train_2d_unet":
                    all_params = self.setup_params_train_2d_unet(dst)
                elif self.pipeline_type == "predict_2d_unet":
                    all_params = self.setup_params_predict_2d_unet(dst)
                else:
                    logger.warning(
                        f"No action exists for pipeline: {self.pipeline_type}")
                    return

                all_params.update(
                    {k: v.value()
                     for k, v in self.widgets.items()})

                logger.info(
                    f"Computing pipelines {self.pipeline_type} {all_params}")
                result = None
                try:
                    pbar.update(1)
                    result = Launcher.g.run("pipelines", self.pipeline_type,
                                            **all_params)
                    print(result)
                except Exception as err:
                    print(err)
                if result is not None:
                    pbar.update(1)

            except Exception as e:
                print(e)
Example #10
    def set_sv(self):
        with progress(total=3) as pbar:
            pbar.set_description("Viewing feature")
            pbar.update(1)
            cfg.current_supervoxels = self.region.value()
            cfg.label_value = self.label.value()
            cfg.brush_size = self.width.value()
            print(f"set_sv {cfg.current_supervoxels}, {cfg.label_value}")

            cfg.three_dim = cfg.three_dim_checkbox.value()

            if cfg.label_value is not None:
                # example 'label_value': {'level': '001_level', 'idx': 2, 'color': '#ff007f'}
                cfg.ppw.clientEvent.emit({
                    "source": "annotations",
                    "data": "set_paint_params",
                    "paint_params": {
                        "current_supervoxels": self.region.value(),
                        "label_value": self.label.value(),
                        "brush_size": self.width.value(),
                        "level_id": self.label.value()["level"],
                    },
                })
                pbar.update(1)
                cfg.ppw.clientEvent.emit({
                    "source": "annotations",
                    "data": "paint_annotations",
                    "level_id": self.label.value()["level"],
                })
                cfg.ppw.clientEvent.emit({
                    "source": "annotations",
                    "data": "set_paint_params",
                    "paint_params": {
                        "current_supervoxels": self.region.value(),
                        "label_value": self.label.value(),
                        "brush_size": self.width.value(),
                        "level_id": self.label.value()["level"],
                    },
                })
            pbar.update(1)
Example #11
def run_workflow(msg):
    workflow_file = msg["workflow_file"]

    if not os.path.isabs(workflow_file):
        workflow_file = os.path.join(os.getcwd(), workflow_file)

    with open(workflow_file) as f:
        workflows = yaml.safe_load(f.read())

    num_workflow_steps = len(workflows.keys())
    minVal, maxVal = 0, num_workflow_steps
    with progress(total=num_workflow_steps) as pbar:
    
        for step_idx, k in enumerate(workflows):
            workflow = workflows[k]
            action = workflow.pop("action")
            plugin, command = action.split(".")
            params = workflow.pop("params")

            src_name = workflow.pop("src")
            dst_name = workflow.pop("dst")

            if "src_group" in workflow:
                src_group = workflow.pop("src_group")
                src = DataModel.g.dataset_uri(src_name, group=src_group)
            else:
                src = DataModel.g.dataset_uri(src_name, group=plugin)

            dst = DataModel.g.dataset_uri(dst_name, group=plugin)
            all_params = dict(src=src, dst=dst, modal=True)
            all_params.update(params)
            logger.info(f"Executing workflow {all_params}")

            logger.debug(
                f"+ Running {plugin}, {command} on {src}\n to dst {dst} {all_params}\n"
            )

            Launcher.g.run(plugin, command, **all_params)
            pbar.update(1)
    cfg.ppw.clientEvent.emit(
        {"source": "workspace_gui", "data": "refresh", "value": None}
    )
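run_workflow expects the YAML file to map step names to dictionaries carrying an action of the form "plugin.command", a params mapping, src and dst dataset names, and an optional src_group. The snippet below parses a small illustrative file the same way the loop above does; all step names, dataset names, and parameters are hypothetical.

import yaml

example_workflow = """
blur_raw:
  action: features.gaussian_blur
  src: 001_raw
  src_group: features        # optional; run_workflow defaults to the plugin name
  dst: 002_gblur
  params:
    sigma: 2
make_supervoxels:
  action: superregions.supervoxels
  src: 002_gblur
  src_group: features
  dst: 001_supervoxels
  params:
    n_segments: 10000
    compactness: 0.5
"""

workflows = yaml.safe_load(example_workflow)
for step_name, workflow in workflows.items():
    plugin, command = workflow.pop("action").split(".")
    params = workflow.pop("params")
    print(step_name, plugin, command, workflow.pop("src"), workflow.pop("dst"), params)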
Example #12
    def view_pipeline(self):
        logger.debug(f"View pipeline_id {self.pipeline_id}")
        with progress(total=2) as pbar:
            pbar.set_description("Viewing feature")
            pbar.update(1)
            if self.annotations_source:
                if self.annotations_source.value():
                    level_id = str(self.annotations_source.value().rsplit(
                        "/", 1)[-1])
                else:
                    level_id = '001_level'
                logger.debug(f"Assigning annotation level {level_id}")

                cfg.ppw.clientEvent.emit({
                    "source": "pipelines",
                    "data": "view_pipeline",
                    "pipeline_id": self.pipeline_id,
                    "level_id": level_id,
                })
            pbar.update(1)
Example #13
    def view_analyzer(self):
        logger.debug(f"View analyzer_id {self.analyzer_id}")
        with progress(total=2) as pbar:
            pbar.set_description("Viewing analyzer")
            pbar.update(1)
            
            if self.annotations_source:
                if self.annotations_selected:
                    level_id = self.annotations_source.value().rsplit("/", 1)[-1]
                else:
                    level_id = "001_level"
                logger.debug(f"Assigning annotation level {level_id}")

                cfg.ppw.clientEvent.emit(
                    {
                        "source": "analyzer",
                        "data": "view_pipeline",
                        "pipeline_id": self.analyzer_id,
                        "level_id": level_id,
                    }
                )
            
            pbar.update(1)
Example #14
    def run_clicked(self):
        """Starts SuRVoS2 server and client as subprocesses when 'Run' button pressed.

        Raises:
            Exception: If survos.py not found.
        """
        with progress(total=3) as pbar:
            pbar.set_description("Starting server...")
            pbar.update(1)

            self.ssh_error = (
                False  # Flag which will be set to True if there is an SSH error
            )
            command_dir = os.path.abspath(os.path.dirname(__file__))  # os.getcwd()

            # Set current dir to survos root
            from pathlib import Path

            command_dir = Path(
                command_dir).absolute().parent.parent.parent.resolve()
            os.chdir(command_dir)

            self.script_fullname = os.path.join(command_dir, "survos.py")
            if not os.path.isfile(self.script_fullname):
                raise Exception("{}: Script not found".format(
                    self.script_fullname))
            # Retrieve the parameters from the fields TODO: Put some error checking in
            self.run_config["workspace_name"] = self.ws_name_linedt_2.text()
            self.run_config["server_port"] = self.server_port_linedt.text()
            # Temporary measure to check whether the workspace exists or not
            full_ws_path = os.path.join(Config["model.chroot"],
                                        self.run_config["workspace_name"])
            if not os.path.isdir(full_ws_path):
                logger.error(
                    f"No workspace can be found at {full_ws_path}, Not starting SuRVoS."
                )
                self.button_feedback_response(
                    f"Workspace {self.run_config['workspace_name']} does not appear to exist!",
                    self.run_button,
                    "maroon",
                )
                return
            pbar.update(1)
            # Try some fancy SSH stuff here
            if self.ssh_button.isChecked():
                self.start_server_over_ssh()
            else:
                self.server_process = subprocess.Popen([
                    "python",
                    self.script_fullname,
                    "start_server",
                    self.run_config["workspace_name"],
                    self.run_config["server_port"],
                    DataModel.g.CHROOT,
                ])
                try:
                    outs, errs = self.server_process.communicate(timeout=10)
                    print(f"OUTS: {outs, errs}")
                except subprocess.TimeoutExpired:
                    pass

                # self.start_client()
                logger.info(f"setting remote: {self.server_port_linedt.text()}")
                remote_ip_port = "127.0.0.1:" + self.server_port_linedt.text()
                logger.info(f"setting remote: {remote_ip_port}")
                resp = Launcher.g.set_remote(remote_ip_port)
                logger.info(f"Response from server to setting remote: {resp}")

                cfg.ppw.clientEvent.emit({
                    "source": "server_tab",
                    "data": "set_workspace",
                    "workspace": self.ws_name_linedt_2.text(),
                })
                cfg.ppw.clientEvent.emit({
                    "source": "panel_gui",
                    "data": "refresh",
                    "value": None
                })
                # cfg.ppw.clientEvent.emit({'data' : 'view_feature', 'feature_id' : '001_raw'})
            pbar.update(1)