Example No. 1
    def _add_feature_source(self):
        self.feature_source = FeatureComboBox()
        self.feature_source.fill()
        self.feature_source.setMaximumWidth(250)

        widget = HWidgets("Feature:", self.feature_source, Spacing(35), stretch=1)
        self.add_row(widget)
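
This helper pattern (create the combo box, fill() it from the workspace, cap its width, wrap it in a labelled row) recurs in every card below. A rough plain-Qt sketch of the row it builds, assuming HWidgets lays its arguments out horizontally and Spacing adds fixed padding (FeatureComboBox is a SurVoS2 wrapper, stood in for here by QComboBox):

from qtpy.QtWidgets import QApplication, QComboBox, QHBoxLayout, QLabel, QWidget

app = QApplication([])

row = QWidget()
layout = QHBoxLayout(row)
combo = QComboBox()                                # stand-in for FeatureComboBox
combo.addItems(["001 Raw", "002 Gaussian Blur"])   # hypothetical feature entries
combo.setMaximumWidth(250)
layout.addWidget(QLabel("Feature:"))
layout.addWidget(combo)
layout.addStretch(1)                               # rough stand-in for Spacing(35)/stretch=1

row.show()
# app.exec_()  # start the event loop to actually display the row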
Example No. 2
    def __init__(self, svid, svname, parent=None):
        super().__init__(
            title=svname, collapsible=True, removable=True, editable=True, parent=parent
        )
        self.svid = svid
        self.svname = svname

        from survos2.frontend.plugins.features import FeatureComboBox

        self.svsource = FeatureComboBox()
        self.svsource.setMaximumWidth(250)
        self.svshape = LineEdit(parse=int, default=10)
        self.svshape.setMaximumWidth(250)
        self.svspacing = LineEdit3D(parse=float, default=1)
        self.svspacing.setMaximumWidth(250)
        self.svcompactness = LineEdit(parse=float, default=20)
        self.svcompactness.setMaximumWidth(250)

        self.int64_checkbox = CheckBox(checked=False)

        self.compute_btn = PushButton("Compute")
        self.view_btn = PushButton("View", accent=True)

        self.add_row(HWidgets("Source:", self.svsource, stretch=1))
        self.add_row(HWidgets("Shape:", self.svshape, stretch=1))
        self.add_row(HWidgets("Spacing:", self.svspacing, stretch=1))
        self.add_row(HWidgets("Compactness:", self.svcompactness, stretch=1))
        self.add_row(HWidgets("Int64:", self.int64_checkbox, stretch=1))

        self.add_row(HWidgets(None, self.compute_btn))
        self.add_row(HWidgets(None, self.view_btn))

        self.compute_btn.clicked.connect(self.compute_supervoxels)
        self.view_btn.clicked.connect(self.view_regions)
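
The LineEdit / LineEdit3D / CheckBox widgets above all expose value() (and setValue(), as used later in update_params). A minimal sketch of what a LineEdit(parse=int, default=10) style wrapper could look like, assuming that interface; this is not the SurVoS2 implementation:

from qtpy.QtWidgets import QLineEdit

class ParsedLineEdit(QLineEdit):
    """Hypothetical stand-in for LineEdit(parse=..., default=...)."""
    # (instances must be created after a QApplication exists, as with any QWidget)

    def __init__(self, parse=str, default=None, parent=None):
        super().__init__(parent)
        self.parse = parse
        self.default = default
        if default is not None:
            self.setText(str(default))

    def value(self):
        # Run the raw text through the parse callable; fall back to the
        # default if the current text cannot be parsed.
        try:
            return self.parse(self.text())
        except (TypeError, ValueError):
            return self.default

    def setValue(self, value):
        self.setText(str(value))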
Example No. 3
    def setup_feature_widgets(self):
        feat_group_box = QGroupBox("Features:")
        feat_box_layout = QGridLayout()
        # Labels
        feat_box_layout.addWidget(QLabel("Feature"), 0, 0, 1, 2)
        feat_box_layout.addWidget(QLabel("File type"), 0, 2)
        # Features combo
        self.feat_source = FeatureComboBox()
        feat_box_layout.addWidget(self.feat_source, 1, 0, 1, 2)
        # File type combo
        self.feat_ftype_combo = ComboBox()
        self.add_filetypes_to_combo(self.feat_ftype_combo)
        feat_box_layout.addWidget(self.feat_ftype_combo, 1, 2)
        # Button
        self.feat_export_btn = IconButton("fa.save",
                                          "Export data",
                                          accent=True)
        self.feat_export_btn.clicked.connect(self.save_feature)
        feat_box_layout.addWidget(self.feat_export_btn, 1, 3)

        feat_group_box.setLayout(feat_box_layout)
        return feat_group_box
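
For reference, the same 2x4 grid can be reproduced with plain Qt widgets, using QComboBox/QPushButton as stand-ins for the SurVoS2 FeatureComboBox/ComboBox/IconButton wrappers; the file-type entries mirror the keys used by get_data_filetype in Example No. 4:

from qtpy.QtWidgets import (QApplication, QComboBox, QGridLayout, QGroupBox,
                            QLabel, QPushButton)

app = QApplication([])

group = QGroupBox("Features:")
grid = QGridLayout()
grid.addWidget(QLabel("Feature"), 0, 0, 1, 2)      # label spanning two columns
grid.addWidget(QLabel("File type"), 0, 2)
feature_combo = QComboBox()                        # stand-in for FeatureComboBox
grid.addWidget(feature_combo, 1, 0, 1, 2)
ftype_combo = QComboBox()
ftype_combo.addItems(["HDF5", "MRC", "TIFF"])      # the keys handled by get_data_filetype
grid.addWidget(ftype_combo, 1, 2)
export_btn = QPushButton("Export data")            # stand-in for IconButton("fa.save", ...)
grid.addWidget(export_btn, 1, 3)
group.setLayout(grid)

group.show()
# app.exec_()  # start the event loop to display the group box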
Example No. 4
class ExportPlugin(Plugin):

    __icon__ = "fa.qrcode"
    __pname__ = "export"
    __views__ = ["slice_viewer"]
    __tab__ = "export"

    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self.vbox = VBox(self, spacing=10)
        feature_widgets = self.setup_feature_widgets()
        anno_widgets = self.setup_annotation_widgets()
        pipe_widgets = self.setup_pipeline_widgets()
        self.existing_supervoxels = {}
        self.vbox.addWidget(feature_widgets)
        self.vbox.addWidget(anno_widgets)
        self.vbox.addWidget(pipe_widgets)

    def setup_feature_widgets(self):
        feat_group_box = QGroupBox("Features:")
        feat_box_layout = QGridLayout()
        # Labels
        feat_box_layout.addWidget(QLabel("Feature"), 0, 0, 1, 2)
        feat_box_layout.addWidget(QLabel("File type"), 0, 2)
        # Features combo
        self.feat_source = FeatureComboBox()
        feat_box_layout.addWidget(self.feat_source, 1, 0, 1, 2)
        # File type combo
        self.feat_ftype_combo = ComboBox()
        self.add_filetypes_to_combo(self.feat_ftype_combo)
        feat_box_layout.addWidget(self.feat_ftype_combo, 1, 2)
        # Button
        self.feat_export_btn = IconButton("fa.save",
                                          "Export data",
                                          accent=True)
        self.feat_export_btn.clicked.connect(self.save_feature)
        feat_box_layout.addWidget(self.feat_export_btn, 1, 3)

        feat_group_box.setLayout(feat_box_layout)
        return feat_group_box

    def setup_annotation_widgets(self):
        anno_group_box = QGroupBox("Annotations:")
        anno_box_layout = QGridLayout()
        # Labels
        anno_box_layout.addWidget(QLabel("Annotation"), 0, 0, 1, 2)
        anno_box_layout.addWidget(QLabel("File type"), 0, 2)
        # Annotations combo
        self.anno_source = AnnoComboBox()
        anno_box_layout.addWidget(self.anno_source, 1, 0, 1, 2)
        # File type combo
        self.anno_ftype_combo = ComboBox()
        self.add_filetypes_to_combo(self.anno_ftype_combo)
        anno_box_layout.addWidget(self.anno_ftype_combo, 1, 2)
        # Button
        self.anno_export_btn = IconButton("fa.save",
                                          "Export data",
                                          accent=True)
        self.anno_export_btn.clicked.connect(self.save_anno)
        anno_box_layout.addWidget(self.anno_export_btn, 1, 3)

        anno_group_box.setLayout(anno_box_layout)
        return anno_group_box

    def setup_pipeline_widgets(self):
        pipe_group_box = QGroupBox("Pipeline output:")
        pipe_box_layout = QGridLayout()
        # Labels
        pipe_box_layout.addWidget(QLabel("Pipeline"), 0, 0, 1, 2)
        pipe_box_layout.addWidget(QLabel("File type"), 0, 2)
        # Pipeline combo
        self.pipe_source = SuperRegionSegmentComboBox()
        pipe_box_layout.addWidget(self.pipe_source, 1, 0, 1, 2)
        # File type combo
        self.pipe_ftype_combo = ComboBox()
        self.add_filetypes_to_combo(self.pipe_ftype_combo)
        pipe_box_layout.addWidget(self.pipe_ftype_combo, 1, 2)
        # Button
        self.pipe_export_btn = IconButton("fa.save",
                                          "Export data",
                                          accent=True)
        self.pipe_export_btn.clicked.connect(self.save_pipe)
        pipe_box_layout.addWidget(self.pipe_export_btn, 1, 3)

        pipe_group_box.setLayout(pipe_box_layout)
        return pipe_group_box

    def add_filetypes_to_combo(self, combo):
        for file_type in FILE_TYPES:
            combo.addItem(file_type)

    def save_feature(self):
        result = re.search(r"(\d+) (.*)", self.feat_source.currentText())
        if result:
            fid = result.group(1) + "_" + result.group(2).lower().replace(
                " ", "_")
            logger.info(f"Feature ID: {fid}")
            fname_filter, ext = self.get_data_filetype(self.feat_ftype_combo)
            filename = result.group(2).replace(" ", "") + ext
            path, _ = QFileDialog.getSaveFileName(self, "Save Feature",
                                                  filename, fname_filter)
            if path is not None and len(path) > 0:
                feat_data = self.get_arr_data(fid, "features")
                self.save_data(feat_data, path, ext)
        else:
            logger.info("No feature selected")

    def save_anno(self):
        result = re.search(r"(\d+) (.*)", self.anno_source.currentText())
        if result:
            lid = result.group(1) + "_" + result.group(2).lower().replace(
                " ", "_")
            logger.info(f"Label ID: {lid}")
            fname_filter, ext = self.get_data_filetype(self.anno_ftype_combo)
            filename = lid + "Annotations" + ext
            path, _ = QFileDialog.getSaveFileName(self, "Save Annotation",
                                                  filename, fname_filter)
            if path is not None and len(path) > 0:
                anno_data = self.get_arr_data(lid, "annotations")
                anno_data = anno_data & 15  # keep only the lower 4 bits of each voxel value
                self.save_data(anno_data.astype(np.uint8), path, ext)
        else:
            logger.info("No annotation selected")

    def save_pipe(self):
        result = re.search(r"(\d+) (\S+) (\S+)",
                           self.pipe_source.currentText())
        if result:
            pid = (result.group(1) + "_" + result.group(2).lower() + "_" +
                   result.group(3).lower())
            logger.info(f"Pipeline ID: {pid}")
            fname_filter, ext = self.get_data_filetype(self.pipe_ftype_combo)
            filename = pid + "Output" + ext
            path, _ = QFileDialog.getSaveFileName(self, "Save Pipeline Output",
                                                  filename, fname_filter)
            if path is not None and len(path) > 0:
                pipe_data = self.get_arr_data(pid, "pipelines")
                self.save_data(pipe_data.astype(np.uint8), path, ext)
        else:
            logger.info("No pipeline selected")

    def get_data_filetype(self, combo_box):
        ftype_map = {
            "HDF5": ("HDF5 (*.h5 *.hdf5)", HDF_EXT),
            "MRC": ("MRC (*.mrc *.rec *.st)", MRC_EXT),
            "TIFF": ("TIFF (*.tif *.tiff)", TIFF_EXT),
        }
        return ftype_map.get(combo_box.currentText())

    def get_arr_data(self, item_id, item_type):
        src = DataModel.g.dataset_uri(item_id, group=item_type)
        with DatasetManager(src, out=None, dtype="uint32", fillvalue=0) as DM:
            src_arr = DM.sources[0][:]
        return src_arr

    def save_data(self, data, path, ext):
        if ext == HDF_EXT:
            logger.info(f"Saving data to {path} in HDF5 format")
            with h5.File(path, "w") as f:
                f["/data"] = data
        elif ext == MRC_EXT:
            logger.info(f"Saving data to {path} in MRC format")
            with mrcfile.new(path, overwrite=True) as mrc:
                mrc.set_data(data)
        elif ext == TIFF_EXT:
            logger.info(f"Saving data to {path} in TIFF format")
            io.imsave(path, data)
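
The three save_* methods all parse the current combo-box text into a dataset id before exporting. A self-contained sketch of that step, assuming entries read like "002 Gaussian Blur" (a numeric index followed by a display name):

import re

def combo_text_to_id(text):
    """Turn '002 Gaussian Blur' into '002_gaussian_blur'; return None if no match."""
    match = re.search(r"(\d+) (.*)", text)
    if match is None:
        return None
    return match.group(1) + "_" + match.group(2).lower().replace(" ", "_")

assert combo_text_to_id("002 Gaussian Blur") == "002_gaussian_blur"
assert combo_text_to_id("no leading index") is None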
Example No. 5
class PipelineCard(Card):
    def __init__(self, fid, ftype, fname, fparams, parent=None):
        super().__init__(fname,
                         removable=True,
                         editable=True,
                         collapsible=True,
                         parent=parent)
        self.pipeline_id = fid
        self.pipeline_type = ftype
        self.pipeline_name = fname

        #from qtpy.QtWidgets import QProgressBar

        #self.pbar = QProgressBar(self)
        #self.add_row(self.pbar)

        self.params = fparams
        print(fparams)
        self.widgets = dict()

        if self.pipeline_type == "superregion_segment":
            logger.debug("Adding a superregion_segment pipeline")
            self._add_features_source()
            self._add_annotations_source()
            self._add_constrain_source()
            self._add_regions_source()

            self.ensembles = EnsembleWidget()
            self.ensembles.train_predict.connect(self.compute_pipeline)
            self.svm = SVMWidget()
            self.svm.predict.connect(self.compute_pipeline)

            self._add_classifier_choice()
            self._add_projection_choice()
            self._add_param("lam", type="FloatSlider", default=0.15)
            self._add_confidence_choice()

        elif self.pipeline_type == "rasterize_points":
            self._add_annotations_source()
            self._add_feature_source()
            self._add_objects_source()

        elif self.pipeline_type == "watershed":
            self._add_annotations_source()
            self._add_feature_source()

        elif self.pipeline_type == "predict_segmentation_fcn":
            self._add_annotations_source()
            self._add_feature_source()
            self._add_workflow_file()
            self._add_model_type()
            # self._add_patch_params()

        elif self.pipeline_type == "label_postprocess":
            self._add_annotations_source(label="Layer Over: ")
            self._add_annotations_source2(label="Layer Base: ")
            self.label_index = LineEdit(default=-1, parse=int)
            #widget = HWidgets("Selected label:", self.label_index, Spacing(35), stretch=1)
            #self.add_row(widget)
            #self.offset = LineEdit(default=-1, parse=int)
            #widget2 = HWidgets("Offset:", self.offset, Spacing(35), stretch=1)
            #self.add_row(widget2)

        elif self.pipeline_type == "cleaning":
            # self._add_objects_source()
            self._add_feature_source()
            self._add_annotations_source()

        elif self.pipeline_type == "train_2d_unet":
            self._add_annotations_source()
            self._add_feature_source()
            self._add_unet_2d_training_params()

        elif self.pipeline_type == "predict_2d_unet":
            self._add_annotations_source()
            self._add_feature_source()
            self._add_unet_2d_prediction_params()

        else:
            logger.debug(f"Unsupported pipeline type {self.pipeline_type}.")

        for pname, params in fparams.items():
            if pname not in ["src", "dst"]:
                self._add_param(pname, **params)

        self._add_compute_btn()
        self._add_view_btn()

    def _add_model_type(self):
        self.model_type = ComboBox()
        self.model_type.addItem(key="unet3d")
        self.model_type.addItem(key="fpn3d")
        widget = HWidgets("Model type:",
                          self.model_type,
                          Spacing(35),
                          stretch=0)
        self.add_row(widget)

    def _add_patch_params(self):
        self.patch_size = LineEdit3D(default=64, parse=int)
        self.add_row(
            HWidgets("Patch Size:", self.patch_size, Spacing(35), stretch=1))

    def _add_unet_2d_training_params(self):
        self.add_row(HWidgets("Training Parameters:", Spacing(35), stretch=1))
        self.cycles_frozen = LineEdit(default=8, parse=int)
        self.cycles_unfrozen = LineEdit(default=5, parse=int)
        self.add_row(
            HWidgets("No. Cycles Frozen:",
                     self.cycles_frozen,
                     "No. Cycles Unfrozen",
                     self.cycles_unfrozen,
                     stretch=1))

    def _add_unet_2d_prediction_params(self):
        self.model_file_line_edit = LineEdit(default="Filepath", parse=str)
        model_input_btn = PushButton("Select Model", accent=True)
        model_input_btn.clicked.connect(self.get_model_path)
        self.radio_group = QtWidgets.QButtonGroup()
        self.radio_group.setExclusive(True)
        single_pp_rb = QRadioButton("Single plane")
        single_pp_rb.setChecked(True)
        self.radio_group.addButton(single_pp_rb, 1)
        triple_pp_rb = QRadioButton("Three plane")
        self.radio_group.addButton(triple_pp_rb, 3)
        self.add_row(
            HWidgets(self.model_file_line_edit, model_input_btn, Spacing(35)))
        self.add_row(HWidgets("Prediction Parameters:", Spacing(35),
                              stretch=1))
        self.add_row(HWidgets(single_pp_rb, triple_pp_rb, stretch=1))

    def _add_workflow_file(self):
        self.filewidget = FileWidget(extensions="*.pt", save=False)
        self.add_row(self.filewidget)
        self.filewidget.path_updated.connect(self.load_data)

    def load_data(self, path):
        self.model_fullname = path
        print(f"Setting model fullname: {self.model_fullname}")

    def _add_view_btn(self):
        view_btn = PushButton("View", accent=True)
        view_btn.clicked.connect(self.view_pipeline)
        load_as_annotation_btn = PushButton("Load as annotation", accent=True)
        load_as_annotation_btn.clicked.connect(self.load_as_annotation)
        load_as_float_btn = PushButton("Load as image", accent=True)
        load_as_float_btn.clicked.connect(self.load_as_float)
        self.add_row(
            HWidgets(None, load_as_float_btn, load_as_annotation_btn, view_btn,
                     Spacing(35)))

    def _add_refine_choice(self):
        self.refine_checkbox = CheckBox(checked=True)
        self.add_row(
            HWidgets("MRF Refinement:",
                     self.refine_checkbox,
                     Spacing(35),
                     stretch=0))

    def _add_confidence_choice(self):
        self.confidence_checkbox = CheckBox(checked=False)
        self.add_row(
            HWidgets("Confidence Map as Feature:",
                     self.confidence_checkbox,
                     Spacing(35),
                     stretch=0))

    def _add_objects_source(self):
        self.objects_source = ObjectComboBox(full=True)
        self.objects_source.fill()
        self.objects_source.setMaximumWidth(250)

        widget = HWidgets("Objects:",
                          self.objects_source,
                          Spacing(35),
                          stretch=1)
        self.add_row(widget)

    def _add_classifier_choice(self):
        self.classifier_type = ComboBox()
        self.classifier_type.addItem(key="Ensemble")
        self.classifier_type.addItem(key="SVM")
        widget = HWidgets("Classifier:",
                          self.classifier_type,
                          Spacing(35),
                          stretch=0)

        self.classifier_type.currentIndexChanged.connect(
            self._on_classifier_changed)

        self.clf_container = QtWidgets.QWidget()
        clf_vbox = VBox(self, spacing=4)
        clf_vbox.setContentsMargins(0, 0, 0, 0)
        self.clf_container.setLayout(clf_vbox)

        self.add_row(widget)
        self.add_row(self.clf_container, max_height=500)
        self.clf_container.layout().addWidget(self.ensembles)

    def _on_classifier_changed(self, idx):
        if idx == 0:
            self.clf_container.layout().addWidget(self.ensembles)
            self.svm.setParent(None)
        elif idx == 1:
            self.clf_container.layout().addWidget(self.svm)
            self.ensembles.setParent(None)

    def _add_projection_choice(self):
        self.projection_type = ComboBox()
        self.projection_type.addItem(key="None")
        self.projection_type.addItem(key="pca")
        self.projection_type.addItem(key="rbp")
        self.projection_type.addItem(key="rproj")
        self.projection_type.addItem(key="std")
        widget = HWidgets("Projection:",
                          self.projection_type,
                          Spacing(35),
                          stretch=0)
        self.add_row(widget)

    def _add_feature_source(self):
        self.feature_source = FeatureComboBox()
        self.feature_source.fill()
        self.feature_source.setMaximumWidth(250)

        widget = HWidgets("Feature:",
                          self.feature_source,
                          Spacing(35),
                          stretch=1)
        self.add_row(widget)

    def _add_features_source(self):
        self.features_source = MultiSourceComboBox()
        self.features_source.fill()
        self.features_source.setMaximumWidth(250)
        cfg.pipelines_features_source = self.features_source
        widget = HWidgets("Features:",
                          self.features_source,
                          Spacing(35),
                          stretch=1)
        self.add_row(widget)

    def _add_constrain_source(self):
        print(self.annotations_source.value())
        self.constrain_mask_source = AnnotationComboBox(header=(None, "None"),
                                                        full=True)
        self.constrain_mask_source.fill()
        self.constrain_mask_source.setMaximumWidth(250)

        widget = HWidgets("Constrain mask:",
                          self.constrain_mask_source,
                          Spacing(35),
                          stretch=1)
        self.add_row(widget)

    def _add_annotations_source(self, label="Annotation"):
        self.annotations_source = LevelComboBox(full=True)
        self.annotations_source.fill()
        self.annotations_source.setMaximumWidth(250)

        widget = HWidgets(label,
                          self.annotations_source,
                          Spacing(35),
                          stretch=1)

        self.add_row(widget)

    def _add_annotations_source2(self, label="Annotation 2"):
        self.annotations_source2 = LevelComboBox(full=True)
        self.annotations_source2.fill()
        self.annotations_source2.setMaximumWidth(250)

        widget = HWidgets(label,
                          self.annotations_source2,
                          Spacing(35),
                          stretch=1)
        self.add_row(widget)

    def _add_pipelines_source(self):
        self.pipelines_source = PipelinesComboBox()
        self.pipelines_source.fill()
        self.pipelines_source.setMaximumWidth(250)
        widget = HWidgets("Segmentation:",
                          self.pipelines_source,
                          Spacing(35),
                          stretch=1)
        self.add_row(widget)

    def _add_regions_source(self):
        self.regions_source = RegionComboBox(full=True)  # SourceComboBox()
        self.regions_source.fill()
        self.regions_source.setMaximumWidth(250)

        widget = HWidgets("Superregions:",
                          self.regions_source,
                          Spacing(35),
                          stretch=1)
        cfg.pipelines_regions_source = self.regions_source
        self.add_row(widget)

    def _add_param(self, name, title=None, type="String", default=None):
        if type == "Int":
            p = LineEdit(default=0, parse=int)
        elif type == "FloatSlider":
            p = RealSlider(value=0.0, vmax=1, vmin=0)
            title = "MRF Refinement Amount:"
        elif type == "Float":
            p = LineEdit(default=0.0, parse=float)
        elif type == "FloatOrVector":
            p = LineEdit3D(default=0, parse=float)
        elif type == "IntOrVector":
            p = LineEdit3D(default=0, parse=int)
        elif type == "SmartBoolean":
            p = CheckBox(checked=True)
        else:
            p = None

        if title is None:
            title = name

        if p:
            self.widgets[name] = p
            self.add_row(HWidgets(None, title, p, Spacing(35)))

    def _add_compute_btn(self):
        compute_btn = PushButton("Compute", accent=True)
        compute_btn.clicked.connect(self.compute_pipeline)
        self.add_row(HWidgets(None, compute_btn, Spacing(35)))

    def update_params(self, params):
        logger.debug(f"Pipeline update params {params}")
        for k, v in params.items():
            if k in self.widgets:
                self.widgets[k].setValue(v)
        if "anno_id" in params:
            if params["anno_id"] is not None:
                self.annotations_source.select(
                    os.path.join("annotations/", params["anno_id"]))
        if "object_id" in params:
            if params["object_id"] is not None:
                self.objects_source.select(
                    os.path.join("objects/", params["object_id"]))
        if "feature_id" in params:
            for source in params["feature_id"]:
                self.feature_source.select(os.path.join("features/", source))
        if "feature_ids" in params:
            for source in params["feature_ids"]:
                self.features_source.select(os.path.join("features/", source))
        if "region_id" in params:
            if params["region_id"] is not None:
                self.regions_source.select(
                    os.path.join("regions/", params["region_id"]))
        if "constrain_mask" in params:
            if (params["constrain_mask"] is not None
                    and params["constrain_mask"] != "None"):
                import ast

                constrain_mask_dict = ast.literal_eval(
                    params["constrain_mask"])
                print(constrain_mask_dict)

                constrain_mask_source = (constrain_mask_dict["level"] + ":" +
                                         str(constrain_mask_dict["idx"]))
                print(f"Constrain mask source {constrain_mask_source}")
                self.constrain_mask_source.select(constrain_mask_source)

    def card_deleted(self):
        params = dict(pipeline_id=self.pipeline_id, workspace=True)
        result = Launcher.g.run("pipelines", "remove", **params)
        if result["done"]:
            self.setParent(None)
            _PipelineNotifier.notify()

        cfg.ppw.clientEvent.emit({
            "source": "pipelines",
            "data": "remove_layer",
            "layer_name": self.pipeline_id,
        })

    def view_pipeline(self):
        logger.debug(f"View pipeline_id {self.pipeline_id}")
        with progress(total=2) as pbar:
            pbar.set_description("Viewing feature")
            pbar.update(1)
            if self.annotations_source:
                if self.annotations_source.value():
                    level_id = str(self.annotations_source.value().rsplit(
                        "/", 1)[-1])
                else:
                    level_id = '001_level'
                logger.debug(f"Assigning annotation level {level_id}")

                cfg.ppw.clientEvent.emit({
                    "source": "pipelines",
                    "data": "view_pipeline",
                    "pipeline_id": self.pipeline_id,
                    "level_id": level_id,
                })
            pbar.update(1)

    def get_model_path(self):
        workspace_path = os.path.join(DataModel.g.CHROOT,
                                      DataModel.g.current_workspace)
        self.model_path, _ = QtWidgets.QFileDialog.getOpenFileName(
            self, ("Select model"), workspace_path, ("Model files (*.zip)"))
        self.model_file_line_edit.setValue(self.model_path)

    def load_as_float(self):
        logger.debug(f"Loading prediction {self.pipeline_id} as float image.")

        # get pipeline output
        src = DataModel.g.dataset_uri(self.pipeline_id, group="pipelines")
        with DatasetManager(src, out=None, dtype="uint32", fillvalue=0) as DM:
            src_arr = DM.sources[0][:]
        # create new float image
        params = dict(feature_type="raw", workspace=True)
        result = Launcher.g.run("features", "create", **params)

        if result:
            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(
                f"Created new object in workspace {fid}, {ftype}, {fname}")

            dst = DataModel.g.dataset_uri(fid, group="features")
            with DatasetManager(dst, out=dst, dtype="float32",
                                fillvalue=0) as DM:
                DM.out[:] = src_arr

            cfg.ppw.clientEvent.emit({
                "source": "workspace_gui",
                "data": "refresh",
                "value": None
            })

    def load_as_annotation(self):
        logger.debug(f"Loading prediction {self.pipeline_id} as annotation.")

        # get pipeline output
        src = DataModel.g.dataset_uri(self.pipeline_id, group="pipelines")
        with DatasetManager(src, out=None, dtype="uint32", fillvalue=0) as DM:
            src_arr = DM.sources[0][:]
        label_values = np.unique(src_arr)

        # create new level
        params = dict(level=self.pipeline_id, workspace=True)
        result = Launcher.g.run("annotations", "add_level", workspace=True)

        # create a blank label for each unique value in the pipeline output array
        if result:
            level_id = result["id"]

            for v in label_values:
                params = dict(
                    level=level_id,
                    idx=int(v),
                    name=str(v),
                    color="#11FF11",
                    workspace=True,
                )
                label_result = Launcher.g.run("annotations", "add_label",
                                              **params)

            params = dict(
                level=str(self.annotations_source.value().rsplit("/", 1)[-1]),
                workspace=True,
            )
            anno_result = Launcher.g.run("annotations", "get_levels",
                                         **params)[0]

            params = dict(level=str(level_id), workspace=True)
            level_result = Launcher.g.run("annotations", "get_levels",
                                          **params)[0]

            try:
                # set the new level color mapping to the mapping from the pipeline
                for v in level_result["labels"].keys():
                    if v in anno_result["labels"]:
                        label_hex = anno_result["labels"][v]["color"]
                        label = dict(
                            idx=int(v),
                            name=str(v),
                            color=label_hex,
                        )
                        params = dict(level=result["id"], workspace=True)
                        label_result = Launcher.g.run("annotations",
                                                      "update_label", **params,
                                                      **label)
            except Exception as err:
                logger.debug(f"Exception {err}")

            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(
                f"Created new object in workspace {fid}, {ftype}, {fname}")

            # set levels array to pipeline output array
            dst = DataModel.g.dataset_uri(fid, group="annotations")
            with DatasetManager(dst, out=dst, dtype="uint32",
                                fillvalue=0) as DM:
                DM.out[:] = src_arr

            cfg.ppw.clientEvent.emit({
                "source": "workspace_gui",
                "data": "refresh",
                "value": None
            })

    def setup_params_superregion_segment(self, dst):
        feature_names_list = [
            n.rsplit("/", 1)[-1] for n in self.features_source.value()
        ]
        src_grp = None if self.annotations_source.currentIndex() == 0 else "pipelines"
        src = DataModel.g.dataset_uri(
            self.annotations_source.value().rsplit("/", 1)[-1],
            group="annotations",
        )
        all_params = dict(src=src, modal=True)
        all_params["workspace"] = DataModel.g.current_workspace

        logger.info(f"Setting src to {self.annotations_source.value()} ")
        all_params["region_id"] = str(self.regions_source.value().rsplit(
            "/", 1)[-1])
        all_params["feature_ids"] = feature_names_list
        all_params["anno_id"] = str(self.annotations_source.value().rsplit(
            "/", 1)[-1])
        if self.constrain_mask_source.value() is not None:
            all_params["constrain_mask"] = self.constrain_mask_source.value()
        else:
            all_params["constrain_mask"] = "None"

        all_params["dst"] = dst
        all_params["refine"] = self.widgets["refine"].value()
        all_params["lam"] = self.widgets["lam"].value()
        all_params["classifier_type"] = self.classifier_type.value()
        all_params["projection_type"] = self.projection_type.value()
        all_params["confidence"] = self.confidence_checkbox.value()

        if self.classifier_type.value() == "Ensemble":
            all_params["classifier_params"] = self.ensembles.get_params()
        else:
            all_params["classifier_params"] = self.svm.get_params()
        return all_params

    def setup_params_rasterize_points(self, dst):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        all_params = dict(src=src, modal=True)
        all_params["workspace"] = DataModel.g.current_workspace
        # all_params["anno_id"] = str(
        #    self.annotations_source.value().rsplit("/", 1)[-1]
        # )
        all_params["feature_id"] = self.feature_source.value()
        all_params["object_id"] = str(self.objects_source.value())
        all_params["acwe"] = self.widgets["acwe"].value()
        # all_params["object_scale"] = self.widgets["object_scale"].value()
        # all_params["object_offset"] = self.widgets["object_offset"].value()
        all_params["dst"] = dst
        return all_params

    def setup_params_watershed(self, dst):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        all_params = dict(src=src, dst=dst, modal=True)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["dst"] = self.pipeline_id
        all_params["anno_id"] = str(self.annotations_source.value().rsplit(
            "/", 1)[-1])
        return all_params

    def setup_params_predict_segmentation_fcn(self, dst):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        all_params = dict(src=src, dst=dst, modal=True)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["anno_id"] = str(self.annotations_source.value().rsplit(
            "/", 1)[-1])
        all_params["feature_id"] = self.feature_source.value()
        all_params["model_fullname"] = self.model_fullname
        all_params["model_type"] = self.model_type.value()
        all_params["dst"] = self.pipeline_id
        return all_params

    def setup_params_label_postprocess(self, dst):
        all_params = dict(modal=True)
        all_params["workspace"] = DataModel.g.current_workspace

        print(self.annotations_source.value())

        if self.annotations_source.value():
            all_params["level_over"] = str(
                self.annotations_source.value().rsplit("/", 1)[-1])
        else:
            all_params["level_over"] = "None"
        all_params["level_base"] = str(self.annotations_source2.value().rsplit(
            "/", 1)[-1])
        all_params["dst"] = dst

        #all_params["selected_label"] = int(self.label_index.value())
        #all_params["offset"] = int(self.offset.value())
        all_params["selected_label"] = int(
            self.widgets["selected_label"].value())
        all_params["offset"] = int(self.widgets["offset"].value())
        return all_params

    def setup_params_cleaning(self, dst):
        all_params = dict(dst=dst, modal=True)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_id"] = str(self.feature_source.value())
        # all_params["object_id"] = str(self.objects_source.value())
        return all_params

    def setup_params_train_2d_unet(self, dst):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        all_params = dict(src=src, dst=dst, modal=True)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_id"] = str(self.feature_source.value())
        all_params["anno_id"] = str(self.annotations_source.value().rsplit(
            "/", 1)[-1])
        all_params["unet_train_params"] = dict(
            cyc_frozen=self.cycles_frozen.value(),
            cyc_unfrozen=self.cycles_unfrozen.value())
        return all_params

    def setup_params_predict_2d_unet(self, dst):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        all_params = dict(src=src, dst=dst, modal=True)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_id"] = str(self.feature_source.value())
        all_params["anno_id"] = str(self.annotations_source.value().rsplit(
            "/", 1)[-1])
        all_params["model_path"] = str(self.model_file_line_edit.value())
        all_params["no_of_planes"] = self.radio_group.checkedId()
        return all_params

    def compute_pipeline(self):
        dst = DataModel.g.dataset_uri(self.pipeline_id, group="pipelines")

        with progress(total=3) as pbar:
            pbar.set_description("Calculating pipeline")
            pbar.update(1)
            try:
                if self.pipeline_type == "superregion_segment":
                    all_params = self.setup_params_superregion_segment(dst)
                elif self.pipeline_type == "rasterize_points":
                    all_params = self.setup_params_rasterize_points(dst)
                elif self.pipeline_type == "watershed":
                    all_params = self.setup_params_watershed(dst)
                elif self.pipeline_type == "predict_segmentation_fcn":
                    all_params = self.setup_params_predict_segmentation_fcn(
                        dst)
                elif self.pipeline_type == "label_postprocess":
                    all_params = self.setup_params_label_postprocess(dst)
                elif self.pipeline_type == "cleaning":
                    all_params = self.setup_params_cleaning(dst)
                elif self.pipeline_type == "train_2d_unet":
                    all_params = self.setup_params_train_2d_unet(dst)
                elif self.pipeline_type == "predict_2d_unet":
                    all_params = self.setup_params_predict_2d_unet(dst)
                else:
                    logger.warning(
                        f"No action exists for pipeline: {self.pipeline_type}")
                    return

                all_params.update(
                    {k: v.value()
                     for k, v in self.widgets.items()})

                logger.info(
                    f"Computing pipelines {self.pipeline_type} {all_params}")
                result = None
                try:
                    pbar.update(1)
                    result = Launcher.g.run("pipelines", self.pipeline_type,
                                            **all_params)
                    print(result)
                except Exception as err:
                    print(err)
                if result is not None:
                    pbar.update(1)

            except Exception as e:
                print(e)

    def card_title_edited(self, newtitle):
        params = dict(pipeline_id=self.pipeline_id,
                      new_name=newtitle,
                      workspace=True)
        result = Launcher.g.run("pipelines", "rename", **params)

        if result["done"]:
            _PipelineNotifier.notify()

        return result["done"]
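
compute_pipeline builds its request by merging the value() of every widget registered in self.widgets into the parameter dictionary. A minimal, self-contained sketch of that registry pattern, where the Param stub stands in for the SurVoS2 widgets (LineEdit, CheckBox, RealSlider, ...), all of which expose value():

class Param:
    """Hypothetical stand-in for a SurVoS2 parameter widget."""

    def __init__(self, value):
        self._value = value

    def value(self):
        return self._value


widgets = {"lam": Param(0.15), "refine": Param(True)}    # name -> widget registry

all_params = {"dst": "pipeline_dst_uri", "modal": True}  # placeholder values
all_params.update({k: v.value() for k, v in widgets.items()})
print(all_params)  # {'dst': 'pipeline_dst_uri', 'modal': True, 'lam': 0.15, 'refine': True}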
Example No. 6
class AnalyzerCard(Card):
    def __init__(self, analyzer_id, analyzer_name, analyzer_type, parent=None):
        super().__init__(
            title=analyzer_name,
            collapsible=True,
            removable=True,
            editable=True,
            parent=parent,
        )
        self.analyzer_id = analyzer_id
        self.analyzer_name = analyzer_name
        self.analyzer_type = analyzer_type
        self.annotations_source = (
            "001_level"  # default annotation level to use for labels
        )
        additional_buttons = []
        self.annotations_selected = False
        self.op_cards = []

        if self.analyzer_type == "label_splitter":
            # radio buttons to select source type
            self.radio_group = QtWidgets.QButtonGroup()
            self.radio_group.setExclusive(True)
            pipelines_rb = QRadioButton("Pipelines")
            pipelines_rb.setChecked(True)
            pipelines_rb.toggled.connect(self._pipelines_rb_checked)
            self.radio_group.addButton(pipelines_rb, 1)
            analyzers_rb = QRadioButton("Analyzers")
            analyzers_rb.toggled.connect(self._analyzers_rb_checked)
            self.radio_group.addButton(analyzers_rb, 2)
            annotations_rb = QRadioButton("Annotation")
            self.radio_group.addButton(annotations_rb, 3)
            annotations_rb.toggled.connect(self._annotations_rb_checked)
            self.add_row(HWidgets(pipelines_rb, analyzers_rb, annotations_rb))
            
            self.source_container = QtWidgets.QWidget()
            source_vbox = VBox(self, spacing=4)
            source_vbox.setContentsMargins(0, 0, 0, 0)
            self.source_container.setLayout(source_vbox)
            self.add_row(self.source_container)
            self.pipelines_source = PipelinesComboBox()
            self.pipelines_source.fill()
            self.pipelines_source.setMaximumWidth(250)
            self.pipelines_widget = HWidgets(
                "Segmentation:", self.pipelines_source, Spacing(35), stretch=1
            )
            self.pipelines_widget.setParent(None)

            self.annotations_source = LevelComboBox(full=True)
            self.annotations_source.fill()
            self.annotations_source.setMaximumWidth(250)
            self.annotations_widget = HWidgets("Annotation", self.annotations_source, Spacing(35), stretch=1)
            self.annotations_widget.setParent(None)

            self.analyzers_source = AnalyzersComboBox()
            self.analyzers_source.fill()
            self.analyzers_source.setMaximumWidth(250)
            self.analyzers_widget = HWidgets(
                "Analyzers:", self.analyzers_source, Spacing(35), stretch=1
            )
            self.analyzers_widget.setParent(None)

            self.source_container.layout().addWidget(self.pipelines_widget)
            self.current_widget = self.pipelines_widget

            self._add_feature_source()

            self.background_label = LineEdit(default=0, parse=float)
            widget = HWidgets("Background label:", self.background_label, Spacing(35), stretch=1)
            self.add_row(widget)

            self.add_rules_btn = PushButton("Add Rule")
            self.add_rules_btn.clicked.connect(self._add_rule)
            self.refresh_rules_btn = PushButton("Refresh plots")
            self.refresh_rules_btn.clicked.connect(self._setup_ops)

            widget = HWidgets(self.add_rules_btn, self.refresh_rules_btn, Spacing(35), stretch=0)
            self.add_row(widget)

            self.export_csv_btn = PushButton("Export CSV")
            self.load_as_objects_btn = PushButton("Load as Objects")

            self.add_row(HWidgets(None, self.load_as_objects_btn, self.export_csv_btn, Spacing(35)))
            self.load_as_objects_btn.clicked.connect(self.load_as_objects)
            self.export_csv_btn.clicked.connect(self.export_csv)

            self.feature_name_combo_box = SimpleComboBox(
                full=True, values=feature_names
            )
            self.feature_name_combo_box.fill()

            self.add_row(HWidgets("Explore feature name: ", self.feature_name_combo_box, Spacing(35), stretch=0))
            self._add_view_btn()
        elif self.analyzer_type == "image_stats":
            self._add_features_source()
            self.plot_btn = PushButton("Plot")
            additional_buttons.append(self.plot_btn)
            self.plot_btn.clicked.connect(self.clustering_plot)
        elif self.analyzer_type == "level_image_stats":
            self._add_annotations_source()
            self.statistic_name_combo_box = SimpleComboBox(
                full=True, values=["Mean", "Std", "Var"]
            )
            widget = HWidgets(
                "Statistic:", self.statistic_name_combo_box, Spacing(35), stretch=1
            )
            self.add_row(widget)
            self.label_index = LineEdit(default=1, parse=float)
            widget = HWidgets("Level index:", self.label_index, Spacing(35), stretch=1)
            self.add_row(widget)
        elif self.analyzer_type == "binary_image_stats":
            self._add_feature_source()
            self.threshold = LineEdit(default=0.5, parse=float)
            widget = HWidgets("Threshold:", self.threshold, Spacing(35), stretch=1)
            self.add_row(widget)
            self.load_as_objects_btn = PushButton("Load as Objects")
            additional_buttons.append(self.load_as_objects_btn)
            self.load_as_objects_btn.clicked.connect(self.load_as_objects)
        elif self.analyzer_type == "object_stats":
            self._add_features_source()
            self._add_objects_source()
            self.stat_name_combo_box = SimpleComboBox(
                full=True, values=["Mean", "Std", "Var"]
            )
            self.stat_name_combo_box.fill()
            widget = HWidgets(
                "Statistic name:", self.stat_name_combo_box, Spacing(35), stretch=1
            )
            self.add_row(widget)
        elif self.analyzer_type == "object_detection_stats":
            self._add_features_source()
            self._add_object_detection_stats_source()
        elif self.analyzer_type == "find_connected_components":
            additional_buttons.append(self._add_pipelines_source2())
            self.label_index = LineEdit(default=0, parse=int)
            widget = HWidgets("Label Index:", self.label_index, Spacing(35), stretch=1)
            self.add_row(widget)
            self.load_as_objects_btn = additional_buttons[-1]
            self.load_as_objects_btn.clicked.connect(self.load_as_objects)
            self._add_view_btn()
        elif self.analyzer_type == "detector_predict":
            self._add_features_source()
            self._add_objects_source()
            self._add_model_file()
        elif self.analyzer_type == "remove_masked_objects":
            self._add_feature_source()
            self._add_objects_source()
            self.load_as_objects_btn = PushButton("Load as Objects")
            additional_buttons.append(self.load_as_objects_btn)
            self.load_as_objects_btn.clicked.connect(self.load_as_objects)
        elif self.analyzer_type == "spatial_clustering":
            self._add_feature_source()
            self._add_objects_source()
            self.eps = LineEdit(default=0.1, parse=float)
            widget = HWidgets("DBScan EPS:", self.eps, Spacing(35), stretch=1)
            self.add_row(widget)
            self.load_as_objects_btn = PushButton("Load as Objects")
            additional_buttons.append(self.load_as_objects_btn)
            self.load_as_objects_btn.clicked.connect(self.load_as_objects)
        self.calc_btn = PushButton("Compute")
        self.add_row(HWidgets(None, self.calc_btn, Spacing(35)))
        if len(additional_buttons) > 0:
            self.add_row(HWidgets(None, *additional_buttons, Spacing(35)))
        self.calc_btn.clicked.connect(self.calculate_analyzer)
        self.table_control = None
        self.plots = []

    def _pipelines_rb_checked(self, enabled):
        if enabled:
            self.source_container.layout().addWidget(self.pipelines_widget)
            if self.current_widget:
                self.current_widget.setParent(None)
            self.current_widget = self.pipelines_widget
            self.annotations_selected = False

    def _analyzers_rb_checked(self, enabled):
        if enabled:
            self.source_container.layout().addWidget(self.analyzers_widget)
            if self.current_widget:
                self.current_widget.setParent(None)
            self.current_widget = self.analyzers_widget
            self.annotations_selected = False

    def _annotations_rb_checked(self, enabled):
        if enabled:
            self.source_container.layout().addWidget(self.annotations_widget)
            if self.current_widget:
                self.current_widget.setParent(None)
            self.current_widget = self.annotations_widget
            self.annotations_selected = True

    def _setup_ops(self):
        print(f"Current number of op cards {len(self.op_cards)}")
        if self.table_control:
            self.table_control.w.setParent(None)
            self.table_control = None
        if len(self.plots) > 0:
            for plot in self.plots:
                plot.setParent(None)
                plot = None
            self.plots = []
        #self.clear_widgets()

    def _add_rule(self):
        op_card = RuleCard(title="Rule", editable=True, collapsible=False, removable=True, parent=self)
        self.op_cards.append(op_card)
        self.add_row(op_card)
        self.add_to_widget_list(op_card)

    def _add_model_file(self):
        self.filewidget = FileWidget(extensions="*.pt", save=False)
        self.add_row(self.filewidget)
        self.filewidget.path_updated.connect(self.load_data)

    def load_data(self, path):
        self.model_fullname = path
        print(f"Setting model fullname: {self.model_fullname}")

    def load_as_objects(self):
        logger.debug("Load analyzer result as objects")
        from survos2.entity.entities import load_entities_via_file

        load_entities_via_file(self.entities_arr, flipxy=False)
        cfg.ppw.clientEvent.emit(
            {"source": "analyzer_plugin", "data": "refresh", "value": None}
        )

    def _add_objects_source(self):
        self.objects_source = ObjectComboBox(full=True)
        self.objects_source.fill()
        self.objects_source.setMaximumWidth(250)
        widget = HWidgets("Objects:", self.objects_source, Spacing(35), stretch=1)
        self.add_row(widget)

    def _add_object_detection_stats_source(self):
        self.gold_objects_source = ObjectComboBox(full=True)
        self.gold_objects_source.fill()
        self.gold_objects_source.setMaximumWidth(250)
        widget = HWidgets(
            "Gold Objects:", self.gold_objects_source, Spacing(35), stretch=1
        )
        self.add_row(widget)

        self.predicted_objects_source = ObjectComboBox(full=True)
        self.predicted_objects_source.fill()
        self.predicted_objects_source.setMaximumWidth(250)
        widget = HWidgets(
            "Predicted Objects:", self.predicted_objects_source, Spacing(35), stretch=1
        )
        self.add_row(widget)

    def _add_annotations_source(self, label="Annotation"):
        self.annotations_source = LevelComboBox(full=True)
        self.annotations_source.fill()
        self.annotations_source.setMaximumWidth(250)

        widget = HWidgets(label, self.annotations_source, Spacing(35), stretch=1)

        self.add_row(widget)

    def _add_pipelines_source(self):
        self.pipelines_source = PipelinesComboBox()
        self.pipelines_source.fill()
        self.pipelines_source.setMaximumWidth(250)
        widget = HWidgets(
            "Segmentation:", self.pipelines_source, Spacing(35), stretch=1
        )
        self.add_row(widget)


    def _add_pipelines_source2(self):
        self.pipelines_source = PipelinesComboBox()
        self.pipelines_source.fill()
        self.pipelines_source.setMaximumWidth(250)
        widget = HWidgets(
            "Segmentation:", self.pipelines_source, Spacing(35), stretch=1
        )
        self.add_row(widget)
        load_as_objects = PushButton("Load as Objects")
        return load_as_objects

    def _add_analyzers_source(self):
        self.analyzers_source = AnalyzersComboBox()
        self.analyzers_source.fill()
        self.analyzers_source.setMaximumWidth(250)
        widget = HWidgets(
            "Analyzers:", self.analyzers_source, Spacing(35), stretch=1
        )
        self.add_row(widget)


    def _add_feature_source(self):
        self.feature_source = FeatureComboBox()
        self.feature_source.fill()
        self.feature_source.setMaximumWidth(250)

        widget = HWidgets("Feature:", self.feature_source, Spacing(35), stretch=1)
        self.add_row(widget)

    def _add_features_source(self):
        self.features_source = MultiSourceComboBox()
        self.features_source.fill()
        self.features_source.setMaximumWidth(250)

        widget = HWidgets("Features:", self.features_source, Spacing(35), stretch=1)
        self.add_row(widget)

    def _add_view_btn(self):
        view_btn = PushButton("View", accent=True)
        view_btn.clicked.connect(self.view_analyzer)
        load_as_annotation_btn = PushButton("Load as annotation", accent=True)
        load_as_annotation_btn.clicked.connect(self.load_as_annotation)
        load_as_float_btn = PushButton("Load as feature", accent=True)
        load_as_float_btn.clicked.connect(self.load_as_float)
        self.add_row(
            HWidgets(
                None, load_as_float_btn, load_as_annotation_btn, view_btn, Spacing(35)
            )
        )

    def view_analyzer(self):
        logger.debug(f"View analyzer_id {self.analyzer_id}")
        with progress(total=2) as pbar:
            pbar.set_description("Viewing analyzer")
            pbar.update(1)
            
            if self.annotations_source:
                if self.annotations_selected:
                    level_id = self.annotations_source.value().rsplit("/", 1)[-1]
                else:
                    level_id = "001_level"
                logger.debug(f"Assigning annotation level {level_id}")

                cfg.ppw.clientEvent.emit(
                    {
                        "source": "analyzer",
                        "data": "view_pipeline",
                        "pipeline_id": self.analyzer_id,
                        "level_id": level_id,
                    }
                )
            
            pbar.update(1)
            

    def load_as_float(self):
        logger.debug(f"Loading analyzer result {self.analyzer_id} as float image.")

        # get analyzer output
        src = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        with DatasetManager(src, out=None, dtype="uint32", fillvalue=0) as DM:
            src_arr = DM.sources[0][:]
        # create new float image
        params = dict(feature_type="raw", workspace=True)
        result = Launcher.g.run("features", "create", **params)

        if result:
            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(f"Created new object in workspace {fid}, {ftype}, {fname}")

            dst = DataModel.g.dataset_uri(fid, group="features")
            with DatasetManager(dst, out=dst, dtype="float32", fillvalue=0) as DM:
                DM.out[:] = src_arr

            cfg.ppw.clientEvent.emit(
                {"source": "workspace_gui", "data": "refresh", "value": None}
            )

    def load_as_annotation(self):
        logger.debug(f"Loading analyzer result {self.analyzer_id} as annotation.")

        # get analyzer output
        src = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        with DatasetManager(src, out=None, dtype="uint32", fillvalue=0) as DM:
            src_arr = DM.sources[0][:]
        label_values = np.unique(src_arr)

        # create new level
        params = dict(level=self.analyzer_id, workspace=True)
        result = Launcher.g.run("annotations", "add_level", workspace=True)

        # create a blank label for each unique value in the analyzer output array
        if result:
            level_id = result["id"]

            for v in label_values:
                params = dict(
                    level=level_id,
                    idx=int(v),
                    name=str(v),
                    color="#11FF11",
                    workspace=True,
                )
                label_result = Launcher.g.run("annotations", "add_label", **params)

            # derive label colours from given annotation
            params = dict(
                level=str("001_level"),
                workspace=True,
            )
            anno_result = Launcher.g.run("annotations", "get_levels", **params)[0]

            params = dict(level=str(level_id), workspace=True)
            level_result = Launcher.g.run("annotations", "get_levels", **params)[0]

            try:
                # set the new level color mapping to the mapping from the given annotation
                for v in level_result["labels"].keys():
                    if v in anno_result["labels"]:
                        label_hex = anno_result["labels"][v]["color"]
                        label = dict(
                            idx=int(v),
                            name=str(v),
                            color=label_hex,
                        )
                        params = dict(level=result["id"], workspace=True)
                        label_result = Launcher.g.run(
                            "annotations", "update_label", **params, **label
                        )
            except Exception as err:
                logger.debug(f"Exception {err}")

            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(f"Created new object in workspace {fid}, {ftype}, {fname}")

            dst = DataModel.g.dataset_uri(fid, group="annotations")
            with DatasetManager(dst, out=dst, dtype="uint32", fillvalue=0) as DM:
                DM.out[:] = src_arr

            cfg.ppw.clientEvent.emit(
                {"source": "workspace_gui", "data": "refresh", "value": None}
            )

    def card_deleted(self):
        params = dict(analyzer_id=self.analyzer_id, workspace=True)
        result = Launcher.g.run("analyzer", "remove", **params)
        if result["done"]:
            self.setParent(None)

    def card_title_edited(self, newtitle):
        logger.debug(f"Edited analyzer title {newtitle}")
        params = dict(analyzer_id=self.analyzer_id, new_name=newtitle, workspace=True)
        result = Launcher.g.run("analyzer", "rename", **params)
        return result["done"]

    def update_params(self, params):
        logger.debug(f"Analyzer update params {params}")
        # for k, v in params.items():
        #     if k in self.widgets:
        #         self.widgets[k].setValue(v)
     

    def display_splitter_results(self, result):
        entities = []
        tabledata = []
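        # each result row is expected to hold z, x, y followed by the per-object
        # statistics named in the structured dtype below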

        for i, row in enumerate(result):
            entry = (i, *row[:17])
            tabledata.append(entry)

            entity = (row[0], row[2], row[1], 0)
            entities.append(entity)

        tabledata = np.array(
            tabledata,
            dtype=[
                ("index", int),
                ("z", int),
                ("x", int),
                ("y", int),
                ("Sum", float),
                ("Mean", float),
                ("Std", float),
                ("Var", float),
                ("BB Vol", float),
                ("BB Vol Log10", float),
                ("BB Depth", float),
                ("BB Height", float),
                ("BB Width", float),
                ("OrientBB Vol", float),
                ("OrientBB Vol Log10", float),
                ("OrientBB Depth", float),
                ("OrientBB Height", float),
                ("OrientBB Width", float),
            ],
        )

        if self.table_control is None:
            self.table_control = TableWidget()
            max_height = 500
            self.table_control.w.setProperty("header", False)
            self.table_control.w.setMaximumHeight(max_height)
            self.vbox.addWidget(self.table_control.w)
            self.total_height += 500 + self.spacing
            self.setMinimumHeight(self.total_height)
            
        self.table_control.set_data(tabledata)
        self.collapse()
        self.expand()
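        # keep tabledata on the card so export_csv can write it out later, and keep
        # one coordinate tuple per object for transfer to the viewer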
        self.tabledata = tabledata
        self.entities_arr = np.array(entities)

    def display_component_results(self, result):
        entities = []
        tabledata = []

        for i in range(len(result)):
            entry = (
                i,
                result[i][0],
                result[i][1],
                result[i][2],
                result[i][3],
            )
            tabledata.append(entry)

            entity = (result[i][1], result[i][2], result[i][3], 0)
            entities.append(entity)

        tabledata = np.array(
            tabledata,
            dtype=[
                ("index", int),
                ("area", int),
                ("z", int),
                ("x", int),
                ("y", int),
            ],
        )

        if self.table_control is None:
            self.table_control = TableWidget()
            self.add_row(self.table_control.w, max_height=500)

        self.table_control.set_data(tabledata)
        self.collapse()
        self.expand()

        self.entities_arr = np.array(entities)

    def display_component_results2(self, result):
        entities = []
        tabledata = []

        for i in range(len(result)):
            entry = (
                i,
                result[i][0],
                result[i][1],
                result[i][2],
                result[i][3],
            )
            tabledata.append(entry)

            entity = (result[i][1], result[i][2], result[i][3], 0)
            entities.append(entity)

        self.entities_arr = np.array(entities)
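        # note: unlike display_component_results, the assembled tabledata is not shown
        # in a table widget here; only entities_arr is kept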

    def display_splitter_plot(self, feature_arrays, titles=[], vert_line_at=None):
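        # one histogram canvas is created per feature array; if vert_line_at is given,
        # a vertical line marks the corresponding threshold on each histogram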
        for i, feature_array in enumerate(feature_arrays):
            self.plots.append(MplCanvas(self, width=5, height=5, dpi=100))
            max_height = 600
            self.plots[i].setProperty("header", False)
            self.plots[i].setMaximumHeight(max_height)
            self.vbox.addWidget(self.plots[i])
            self.total_height += max_height + self.spacing
            self.setMinimumHeight(self.total_height)

            colors = ["r", "y", "b", "c", "m", "g"]
            counts, _bins, _ = self.plots[i].axes.hist(
                feature_array, bins=16, color=colors[i % len(colors)]
            )

            self.plots[i].axes.set_title(titles[i])

            if vert_line_at:
                logger.debug(f"Plotting vertical line at: {vert_line_at[i]} {counts.max()}")
                # axvline's ymin/ymax are axis-fraction coordinates, so the default
                # full-height line is what is wanted here
                self.plots[i].axes.axvline(x=vert_line_at[i], color="k")

    def clustering_plot(self):
        src = DataModel.g.dataset_uri(self.features_source.value())
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_ids"] = str(self.features_source.value()[-1])
        all_params["object_id"] = str(self.objects_source.value())
        logger.debug(f"Running analyzer with params {all_params}")
        result = Launcher.g.run("analyzer", "image_stats", **all_params)
        if result:
            src_arr = decode_numpy(result)
            sc = MplCanvas(self, width=5, height=4, dpi=100)
            sc.axes.imshow(src_arr)
            self.add_row(sc, max_height=300)

    def export_csv(self):
        full_path = QtWidgets.QFileDialog.getSaveFileName(
            self, "Select output filename", ".", filter="*.csv"
        )
        if isinstance(full_path, tuple):
            full_path = full_path[0]
        if not full_path:
            # the user cancelled the save dialog
            return

        out_df = pd.DataFrame(self.tabledata)
        out_df.to_csv(full_path)
        logger.debug(f"Exported to csv {full_path}")

    def calc_label_splitter(self):
        if len(self.plots) > 0:
            for plot in self.plots:
                plot.setParent(None)
                plot = None
            self.plots = []

        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        src = DataModel.g.dataset_uri(self.pipelines_source.value(), group="pipelines")
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["pipelines_id"] = str(self.pipelines_source.value())
        all_params["feature_id"] = str(self.feature_source.value())
        all_params["analyzers_id"] = str(self.analyzers_source.value())
        all_params["annotations_id"] = str(self.annotations_source.value())
        
        all_params["mode"] = self.radio_group.checkedId()
        all_params["background_label"] = self.background_label.value()

        split_ops = {}
        split_feature_indexes = []
        split_feature_thresholds = []
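        # split_ops maps the op-card index to one filtering rule, e.g. (illustrative
        # values) {0: {"split_feature_index": "4", "split_op": 1, "split_threshold": 0.5}}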

        if len(self.op_cards) > 0:
            for j, op_card in enumerate(self.op_cards):
                split_op = {}
                split_feature_index = int(op_card.feature_name_combo_box.value())
                split_op["split_feature_index"] = str(split_feature_index)
                split_feature_indexes.append(split_feature_index)
                split_op["split_op"] = op_card.split_op_combo_box.value()
                split_op["split_threshold"] = op_card.split_threshold.value()
                split_feature_thresholds.append(float(op_card.split_threshold.value()))
                split_ops[j] = split_op

        else:
            split_op = {}
            split_feature_index = int(self.feature_name_combo_box.value())
            split_op["split_feature_index"] = str(split_feature_index)
            split_feature_indexes.append(split_feature_index)
            split_op["split_op"] = 1
            split_op["split_threshold"] = 0
            split_feature_thresholds.append(0)
            split_ops[0] = split_op

        all_params["split_ops"] = split_ops


        logger.debug(f"Running analyzer with params {all_params}")
        result_features, features_array = Launcher.g.run(
            "analyzer", "label_splitter", **all_params
        )
        features_ndarray = np.array(features_array)
        logger.debug(f"Shape of features_array {features_ndarray.shape}")

        if features_array:
            logger.debug(f"Segmentation stats result table {len(features_array)}")
            feature_arrays = []
            feature_titles = []
            
            for s in split_feature_indexes:
                feature_title = feature_names[int(s)]
                feature_plot_array = features_ndarray[:, int(s)]
                feature_arrays.append(feature_plot_array)
                feature_titles.append(feature_title)

            logger.debug(f"Feature titles: {feature_titles}")
            logger.debug(f"Split feature thresholds: {split_feature_thresholds}")
            self.display_splitter_plot(feature_arrays, titles=feature_titles, vert_line_at=split_feature_thresholds)
            self.display_splitter_results(result_features)

    def calc_level_image_stats(self):
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        anno_id = DataModel.g.dataset_uri(self.annotations_source.value())
        all_params = dict(dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["anno_id"] = anno_id
        all_params["label_index"] = self.label_index.value()
        logger.debug(f"Running analyzer with params {all_params}")
        result = Launcher.g.run("analyzer", "level_image_stats", **all_params)
        if result:
            logger.debug(f"Level Image stats result table {len(result)}")
            self.display_component_results(result)

    def calc_find_connected_components(self):
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        src = DataModel.g.dataset_uri(self.pipelines_source.value(), group="pipelines")
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["pipelines_id"] = str(self.pipelines_source.value())
        all_params["label_index"] = self.label_index.value()
        logger.debug(f"Running analyzer with params {all_params}")
        result = Launcher.g.run(
            "analyzer", "find_connected_components", **all_params
        )
        print(result)
        if result:
            logger.debug(f"Segmentation stats result table {len(result)}")
            self.display_component_results(result)
    
    def calc_binary_image_stats(self):
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        src = DataModel.g.dataset_uri(self.feature_source.value())
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_id"] = str(self.feature_source.value())
        all_params["threshold"] = self.threshold.value()

        logger.debug(f"Running analyzer with params {all_params}")
        result = Launcher.g.run("analyzer", "binary_image_stats", **all_params)
        if result:
            logger.debug(f"Segmentation stats result table {len(result)}")
            self.display_component_results(result)

    def calc_image_stats(self):
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        src = DataModel.g.dataset_uri(self.features_source.value())
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace

        all_params["feature_ids"] = str(self.features_source.value()[-1])

        logger.debug(f"Running analyzer with params {all_params}")
        result = Launcher.g.run("analyzer", "image_stats", **all_params)
        if result:
            src_arr = decode_numpy(result)
            sc = MplCanvas(self, width=5, height=4, dpi=100)
            sc.axes.imshow(src_arr)
            self.add_row(sc, max_height=300)

    def calc_object_stats2(self):
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        src = DataModel.g.dataset_uri(self.features_source.value())
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_ids"] = str(self.features_source.value()[-1])
        all_params["object_id"] = str(self.objects_source.value())
        logger.debug(f"Running analyzer with params {all_params}")
        result = Launcher.g.run("analyzer", "object_stats", **all_params)
        if result:
            src_arr = decode_numpy(result)
            sc = MplCanvas(self, width=5, height=4, dpi=100)
            sc.axes.imshow(src_arr)
            self.add_row(sc, max_height=300)

    def calc_object_stats(self):
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        src = DataModel.g.dataset_uri(self.features_source.value())
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_ids"] = str(self.features_source.value()[-1])
        all_params["object_id"] = str(self.objects_source.value())
        all_params["stat_name"] = self.stat_name_combo_box.value()
        logger.debug(f"Running analyzer with params {all_params}")
        result = Launcher.g.run("analyzer", "object_stats", **all_params)
        (point_features, img) = result
        if result:
            logger.debug(f"Object stats result table {len(point_features)}")
            tabledata = []
            for i in range(len(point_features)):
                entry = (i, point_features[i])
                tabledata.append(entry)
            tabledata = np.array(
                tabledata,
                dtype=[
                    ("index", int),
                    ("z", float),
                ],
            )
            src_arr = decode_numpy(img)
            sc = MplCanvas(self, width=6, height=5, dpi=80)
            sc.axes.imshow(src_arr)
            sc.axes.axis("off")
            self.add_row(sc, max_height=500)
            if self.table_control is None:
                self.table_control = TableWidget()
                self.add_row(self.table_control.w, max_height=500)

            self.table_control.set_data(tabledata)
            self.collapse()
            self.expand()


    def calc_object_detection_stats(self):
        src = DataModel.g.dataset_uri(self.features_source.value())
        # write results to this card's analyzer dataset, as in the other calc_ methods
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["predicted_objects_id"] = str(
            self.predicted_objects_source.value()
        )
        all_params["gold_objects_id"] = str(self.gold_objects_source.value())

        logger.debug(f"Running object detection analyzer with params {all_params}")
        result = Launcher.g.run("analyzer", "object_detection_stats", **all_params)
        
    def calc_detector_predict(self):
        src = DataModel.g.dataset_uri(
            self.features_source.value()[0], group="pipelines"
        )
        feature_names_list = [
            n.rsplit("/", 1)[-1] for n in self.features_source.value()
        ]
        all_params = dict(src=src, dst=self.pipeline_id, modal=True)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["object_id"] = str(self.objects_source.value())
        all_params["feature_ids"] = feature_names_list
        all_params["model_fullname"] = self.model_fullname
        result = Launcher.g.run("pipelines", self.analyzer_type, **all_params)

    def calc_spatial_clustering(self):
        src = DataModel.g.dataset_uri(self.feature_source.value())
        all_params = dict(src=src, dst=None, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_id"] = str(self.feature_source.value())
        all_params["object_id"] = str(self.objects_source.value())
        all_params["params"] = {"algorithm": "DBSCAN", "eps": self.eps.value(), "min_samples": 1}
        result = Launcher.g.run("analyzer", "spatial_clustering", **all_params)
        print(result)
        logger.debug(f"spatial clustering result table {len(result)}")
        self.display_component_results2(result)

    def calc_removed_masked_objects(self):
        src = DataModel.g.dataset_uri(self.feature_source.value())
        dst = DataModel.g.dataset_uri(self.analyzer_id, group="analyzer")
        all_params = dict(src=src, dst=dst, modal=False)
        all_params["workspace"] = DataModel.g.current_workspace
        all_params["feature_id"] = str(self.feature_source.value())
        all_params["object_id"] = str(self.objects_source.value())
        result = Launcher.g.run("analyzer", "remove_masked_objects", **all_params)
        logger.debug(f"remove_masked_objects result table {len(result)}")
        self.display_component_results2(result)

    # calculate
    def calculate_analyzer(self):
        with progress(total=2) as pbar:
            pbar.set_description("Calculating pipeline")
            pbar.update(1)

            if self.analyzer_type == "label_splitter":
                self.calc_label_splitter()
            elif self.analyzer_type == "find_connected_components":
                self.calc_find_connected_components()    
            elif self.analyzer_type == "level_image_stats":
                self.calc_level_image_stats()
            elif self.analyzer_type == "binary_image_stats":
                self.calc_binary_image_stats()
            elif self.analyzer_type == "image_stats":
                self.calc_image_stats()
            elif self.analyzer_type == "object_stats2":
                self.calc_object_stats2()
            elif self.analyzer_type == "object_stats":
                self.calc_object_stats2()
            elif self.analyzer_type == "object_detection_stats":
                self.calc_object_detection_stats()
            elif self.analyzer_type == "detector_predict":
                self.calc_detector_predict()
            elif self.analyzer_type == "spatial_clustering":
                self.calc_spatial_clustering()
            elif self.analyzer_type == "remove_masked_objects":
                self.calc_removed_masked_objects()

            pbar.update(1)
Ejemplo n.º 7
0
class SupervoxelCard(Card):
    def __init__(self, svid, svname, parent=None):
        super().__init__(
            title=svname, collapsible=True, removable=True, editable=True, parent=parent
        )
        self.svid = svid
        self.svname = svname

        from survos2.frontend.plugins.features import FeatureComboBox

        self.svsource = FeatureComboBox()
        self.svsource.setMaximumWidth(250)
        self.svshape = LineEdit(parse=int, default=10)
        self.svshape.setMaximumWidth(250)
        self.svspacing = LineEdit3D(parse=float, default=1)
        self.svspacing.setMaximumWidth(250)
        self.svcompactness = LineEdit(parse=float, default=20)
        self.svcompactness.setMaximumWidth(250)

        self.int64_checkbox = CheckBox(checked=False)

        self.compute_btn = PushButton("Compute")
        self.view_btn = PushButton("View", accent=True)

        self.add_row(HWidgets("Source:", self.svsource, stretch=1))
        self.add_row(HWidgets("Shape:", self.svshape, stretch=1))
        self.add_row(HWidgets("Spacing:", self.svspacing, stretch=1))
        self.add_row(HWidgets("Compactness:", self.svcompactness, stretch=1))
        self.add_row(HWidgets("Int64:", self.int64_checkbox, stretch=1))

        self.add_row(HWidgets(None, self.compute_btn))
        self.add_row(HWidgets(None, self.view_btn))

        self.compute_btn.clicked.connect(self.compute_supervoxels)
        self.view_btn.clicked.connect(self.view_regions)

    def card_deleted(self):
        params = dict(region_id=self.svid, workspace=True)
        result = Launcher.g.run("superregions", "remove", **params)
        if result["done"]:
            self.setParent(None)

        cfg.ppw.clientEvent.emit(
            {
                "source": "superregions",
                "data": "remove_layer",
                "layer_name": self.svid,
            }
        )

    def card_title_edited(self, newtitle):
        logger.debug(f"Edited region title {newtitle}")
        params = dict(region_id=self.svid, new_name=newtitle, workspace=True)
        result = Launcher.g.run("superregions", "rename", **params)
        return result["done"]

    def view_regions(self):
        logger.debug(f"Transferring supervoxels {self.svid} to viewer")
        with progress(total=2) as pbar:
            pbar.set_description("Viewing feature")
            pbar.update(1)
            print(f"Current Supervoxels: {cfg.current_supervoxels}")
            cfg.ppw.clientEvent.emit(
                {"source": "superregions", "data": "view_regions", "region_id": self.svid}
            )
            pbar.update(1)

    def compute_supervoxels(self):

        with progress(total=4) as pbar:
            pbar.set_description("Refreshing")
            pbar.update(1)
                
            # src = [
            #    DataModel.g.dataset_uri("features/" + s) for s in [self.svsource.value()]
            # ]
            src = DataModel.g.dataset_uri(self.svsource.value(), group="features")
            dst = DataModel.g.dataset_uri(self.svid, group="superregions")
            logger.debug(f"Compute sv: Src {src} Dst {dst}")

            from survos2.model import Workspace

            ws = Workspace(DataModel.g.current_workspace)
            num_chunks = np.prod(np.array(ws.metadata()["chunk_grid"]))
            chunk_size = ws.metadata()["chunk_size"]
            logger.debug(
                f"Using chunk_size {chunk_size} to compute number of supervoxel segments for num_chunks: {num_chunks}."
            )

            with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
                src_dataset_shape = DM.sources[0][:].shape

            # n_segments = int(np.prod(chunk_size) // (self.svshape.value() ** 3))
            pbar.update(1)
            n_segments = int(np.prod(src_dataset_shape) / self.svshape.value() ** 3)
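            # e.g. (illustrative numbers): a 256**3 volume with an svshape of 10 gives
            # n_segments = int(256**3 / 10**3) = 16777, i.e. roughly one supervoxel per
            # 10-voxel cube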

            if self.int64_checkbox.value():
                out_dtype = "uint64"
            else:
                out_dtype = "uint32"

            params = dict(
                src=src,
                dst=dst,
                compactness=round(self.svcompactness.value() / 100, 3),
                # shape=self.svshape.value(),
                n_segments=n_segments,
                spacing=self.svspacing.value(),
                modal=True,
                out_dtype=out_dtype,
            )
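            # note that the UI compactness value is scaled down by a factor of 100, so a
            # field value of 20 is passed to the supervoxel backend as 0.2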
            logger.debug(f"Compute supervoxels with params {params}")

            pbar.update(1)

            result = Launcher.g.run("superregions", "supervoxels", **params)
            if result is not None:
                pbar.update(1)

    def update_params(self, params):
        if "shape" in params:
            self.svshape.setValue(params["shape"])
        if "compactness" in params:
            self.svcompactness.setValue(params["compactness"] * 100)
        if "spacing" in params:
            self.svspacing.setValue(params["spacing"])
        if "source" in params:
            for source in params["source"]:
                self.svsource.select(source)
Ejemplo n.º 8
0
    def __init__(self, name):
        super().__init__(name, FeatureComboBox(full=True))
Ejemplo n.º 9
0
class ObjectsCard(Card):
    def __init__(self,
                 objectsid,
                 objectsname,
                 objectsfullname,
                 objectstype,
                 parent=None):
        super().__init__(
            title=objectsname,
            collapsible=True,
            removable=True,
            editable=True,
            parent=parent,
        )
        self.objectsid = objectsid
        self.objectsname = objectsname
        self.object_scale = 1.0
        self.objectsfullname = objectsfullname
        self.objectstype = objectstype

        self.widgets = {}
        self.filewidget = FileWidget(extensions="*.csv", save=False)
        self.filewidget.path.setText(self.objectsfullname)
        self.add_row(self.filewidget)
        self.filewidget.path_updated.connect(self.load_data)

        self.compute_btn = PushButton("Compute")
        self.view_btn = PushButton("View", accent=True)
        self.get_btn = PushButton("Get", accent=True)

        # self._add_param("scale", title="Scale: ", type="Float", default=1)
        # self._add_param("offset", title="Offset: ", type="FloatOrVector", default=0)
        # self._add_param(
        #     "crop_start", title="Crop Start: ", type="FloatOrVector", default=0
        # )
        # self._add_param(
        #     "crop_end", title="Crop End: ", type="FloatOrVector", default=9000
        # )

        self.flipxy_checkbox = CheckBox(checked=True)
        self.add_row(HWidgets(None, self.flipxy_checkbox, Spacing(35)))
        self.add_row(HWidgets(None, self.view_btn, self.get_btn, Spacing(35)))

        self.view_btn.clicked.connect(self.view_objects)
        self.get_btn.clicked.connect(self.get_objects)

        #cfg.object_scale = self.widgets["scale"].value()
        #cfg.object_offset = self.widgets["offset"].value()
        #cfg.object_crop_start = self.widgets["crop_start"].value()
        #cfg.object_crop_end = self.widgets["crop_end"].value()

        cfg.object_scale = 1.0
        cfg.object_offset = (0, 0, 0)
        cfg.object_crop_start = (0, 0, 0)
        cfg.object_crop_end = (1e9, 1e9, 1e9)
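        # defaults used in place of the per-card scale/offset/crop widgets that are
        # commented out above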

        if self.objectstype == "patches":
            self._add_annotations_source()
            self.entity_mask_bvol_size = LineEdit3D(default=64, parse=int)
            self._add_feature_source()

            self.make_entity_mask_btn = PushButton("Make entity mask",
                                                   accent=True)
            self.make_entity_mask_btn.clicked.connect(self.make_entity_mask)
            self.make_patches_btn = PushButton("Make patches", accent=True)
            self.make_patches_btn.clicked.connect(self.make_patches)
            self.train_fpn_btn = PushButton("Train FPN", accent=True)
            self.train_fpn_btn.clicked.connect(self.train_fpn)

            self.add_row(
                HWidgets(None, self.entity_mask_bvol_size,
                         self.make_entity_mask_btn, Spacing(35)))
            self.add_row(HWidgets(None, self.make_patches_btn, Spacing(35)))
            self.add_row(HWidgets(None, self.train_fpn_btn, Spacing(35)))

        self.table_control = TableWidget()
        self.add_row(self.table_control.w, max_height=500)
        cfg.entity_table = self.table_control

    def _add_param(self, name, title=None, type="String", default=None):
        if type == "Int":
            p = LineEdit(default=default, parse=int)
        elif type == "Float":
            p = LineEdit(default=default, parse=float)
        elif type == "FloatOrVector":
            p = LineEdit3D(default=default, parse=float)
        elif type == "IntOrVector":
            p = LineEdit3D(default=default, parse=int)
        else:
            p = None
        if title is None:
            title = name
        if p:
            self.widgets[name] = p
            self.add_row(HWidgets(None, title, p, Spacing(35)))

    def load_data(self, path):
        self.objectsfullname = path
        print(f"Setting objectsfullname: {self.objectsfullname}")

    def card_deleted(self):
        params = dict(objects_id=self.objectsid, workspace=True)
        result = Launcher.g.run("objects", "remove", **params)
        if result["done"]:
            self.setParent(None)
        self.table_control = None

    def _add_annotations_source(self):
        self.annotations_source = LevelComboBox(full=True)
        self.annotations_source.fill()
        self.annotations_source.setMaximumWidth(250)

        widget = HWidgets("Annotation:",
                          self.annotations_source,
                          Spacing(35),
                          stretch=1)
        self.add_row(widget)

    def card_title_edited(self, newtitle):
        logger.debug(f"Edited entity title {newtitle}")
        params = dict(objects_id=self.objectsid,
                      new_name=newtitle,
                      workspace=True)
        result = Launcher.g.run("objects", "rename", **params)
        return result["done"]

    def view_objects(self):
        logger.debug(f"Transferring objects {self.objectsid} to viewer")
        cfg.ppw.clientEvent.emit({
            "source": "objects",
            "data": "view_objects",
            "objects_id": self.objectsid,
            "flipxy": self.flipxy_checkbox.value(),
        })

    def update_params(self, params):
        if "fullname" in params:
            self.objectsfullname = params["fullname"]

    def _add_feature_source(self):
        self.feature_source = FeatureComboBox()
        self.feature_source.fill()
        self.feature_source.setMaximumWidth(250)

        widget = HWidgets("Feature:",
                          self.feature_source,
                          Spacing(35),
                          stretch=1)
        self.add_row(widget)

    def get_objects(self):
        #cfg.object_scale = self.widgets["scale"].value()
        #cfg.object_offset = self.widgets["offset"].value()
        #cfg.object_crop_start = self.widgets["crop_start"].value()
        #cfg.object_crop_end = self.widgets["crop_end"].value()

        dst = DataModel.g.dataset_uri(self.objectsid, group="objects")
        print(f"objectsfullname: {self.objectsfullname}")
        params = dict(
            dst=dst,
            fullname=self.objectsfullname,
            scale=cfg.object_scale,
            offset=cfg.object_offset,
            crop_start=cfg.object_crop_start,
            crop_end=cfg.object_crop_end,
        )
        logger.debug(f"Getting objects with params {params}")
        result = Launcher.g.run("objects",
                                "update_metadata",
                                workspace=True,
                                **params)

        if self.objectstype == "points":

            tabledata, self.entities_df = setup_entity_table(
                self.objectsfullname,
                scale=cfg.object_scale,
                offset=cfg.object_offset,
                crop_start=cfg.object_crop_start,
                crop_end=cfg.object_crop_end,
                flipxy=self.flipxy_checkbox.value())

        elif self.objectstype == "boxes":
            tabledata, self.entities_df = setup_bb_table(
                self.objectsfullname,
                scale=cfg.object_scale,
                offset=cfg.object_offset,
                crop_start=cfg.object_crop_start,
                crop_end=cfg.object_crop_end,
                flipxy=self.flipxy_checkbox.value())
        elif self.objectstype == "patches":
            tabledata, self.entities_df = setup_entity_table(
                self.objectsfullname,
                scale=cfg.object_scale,
                offset=cfg.object_offset,
                crop_start=cfg.object_crop_start,
                crop_end=cfg.object_crop_end,
                flipxy=self.flipxy_checkbox.value())

        cfg.tabledata = tabledata
        logger.debug(f"Loaded tabledata {tabledata}")
        self.table_control.set_data(tabledata)
        self.collapse()
        self.expand()

    def make_entity_mask(self):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_array = DM.sources[0][:]

        entity_arr = np.array(self.entities_df)

        bvol_dim = self.entity_mask_bvol_size.value()
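        # shift the entity coordinates by the bounding-volume size before building the mask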
        entity_arr[:, 0] -= bvol_dim[0]
        entity_arr[:, 1] -= bvol_dim[1]
        entity_arr[:, 2] -= bvol_dim[2]

        from survos2.entity.entities import make_entity_mask

        gold_mask = make_entity_mask(src_array,
                                     entity_arr,
                                     flipxy=True,
                                     bvol_dim=bvol_dim)[0]

        # create new raw feature
        params = dict(feature_type="raw", workspace=True)
        result = Launcher.g.run("features", "create", **params)

        if result:
            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(
                f"Created new object in workspace {fid}, {ftype}, {fname}")

            dst = DataModel.g.dataset_uri(fid, group="features")
            with DatasetManager(dst, out=dst, dtype="float32",
                                fillvalue=0) as DM:
                DM.out[:] = gold_mask

            cfg.ppw.clientEvent.emit({
                "source": "objects_plugin",
                "data": "refresh",
                "value": None
            })

    def make_patches(self):
        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_array = DM.sources[0][:]

        objects_scale = 1.0
        entity_meta = {
            "0": {
                "name": "class1",
                "size": np.array((15, 15, 15)) * objects_scale,
                "core_radius": np.array((7, 7, 7)) * objects_scale,
            },
        }
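        # entity_meta maps a class id to its display name and the (scaled) size and core
        # radius used when organising entities into patches; a single illustrative class
        # is defined here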

        entity_arr = np.array(self.entities_df)

        combined_clustered_pts, classwise_entities = organize_entities(
            src_array, entity_arr, entity_meta, plot_all=False)

        wparams = {}
        wparams["entities_offset"] = (0, 0, 0)
        wparams["entity_meta"] = entity_meta
        wparams["workflow_name"] = "Make_Patches"
        wparams["proj"] = DataModel.g.current_workspace
        wf = PatchWorkflow([src_array], combined_clustered_pts,
                           classwise_entities, src_array, wparams,
                           combined_clustered_pts)

        src = DataModel.g.dataset_uri(self.annotations_source.value().rsplit(
            "/", 1)[-1],
                                      group="annotations")
        with DatasetManager(src, out=None, dtype="uint16", fillvalue=0) as DM:
            src_dataset = DM.sources[0]
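            # keep only the low 4 bits of the annotation volume, i.e. the label values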
            anno_level = src_dataset[:] & 15

        logger.debug(
            f"Obtained annotation level with labels {np.unique(anno_level)}")

        logger.debug(f"Making patches in path {src_dataset._path}")
        train_v_density = make_patches(wf,
                                       entity_arr,
                                       src_dataset._path,
                                       proposal_vol=(anno_level > 0) * 1.0,
                                       padding=(32, 32, 32),
                                       num_augs=0,
                                       max_vols=-1)

        self.patches = train_v_density

        cfg.ppw.clientEvent.emit({
            "source": "panel_gui",
            "data": "view_patches",
            "patches_fullname": train_v_density
        })

    def train_fpn(self):
        from survos2.entity.train import train_seg
        from survos2.entity.pipeline_ops import make_proposal

        wf_params = {}
        wf_params["torch_models_fullpath"] = "/experiments"
        model_file = train_seg(self.patches, wf_params, num_epochs=1)
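        # train_seg returns the trained model filename, which is joined with
        # torch_models_fullpath below and passed to make_proposal for inference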

        patch_size = (64, 64, 64)
        patch_overlap = (16, 16, 16)
        overlap_mode = "crop"
        model_type = "fpn3d"

        threshold_devs = 1.5
        invert = True

        src = DataModel.g.dataset_uri(self.feature_source.value(),
                                      group="features")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_array = DM.sources[0][:]

        proposal = make_proposal(
            src_array,
            os.path.join(wf_params["torch_models_fullpath"], model_file),
            model_type=model_type,
            patch_size=patch_size,
            patch_overlap=patch_overlap,
            overlap_mode=overlap_mode,
        )

        # create new float image
        params = dict(feature_type="raw", workspace=True)
        result = Launcher.g.run("features", "create", **params)

        if result:
            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(
                f"Created new object in workspace {fid}, {ftype}, {fname}")

            dst = DataModel.g.dataset_uri(fid, group="features")
            with DatasetManager(dst, out=dst, dtype="float32",
                                fillvalue=0) as DM:
                DM.out[:] = proposal

            cfg.ppw.clientEvent.emit({
                "source": "workspace_gui",
                "data": "refresh",
                "value": None
            })