# Example 1
class ClusterProjectionsGroupProperties:
    """Display properties for one group of cluster projections.

    Wraps a :class:`TensorViewProjection` and an RGB toggle, and exposes
    both as observer property items under a common 'Projections' header.
    """
    _prop_builder: ObserverPropertiesBuilder
    _is_rgb: bool = False
    tensor_view_projection: TensorViewProjection

    def __init__(self):
        self._prop_builder = ObserverPropertiesBuilder(self)
        self.tensor_view_projection = TensorViewProjection(is_buffer=False)

    def project_and_scale(self, tensor: torch.Tensor):
        """Run `tensor` through the view projection.

        Returns:
            Tuple of (projected tensor, projection parameters).
        """
        tensor, projection_params = self.tensor_view_projection.transform_tensor(
            tensor, self.is_rgb)
        return tensor, projection_params

    @property
    def is_rgb(self) -> bool:
        return self._is_rgb

    @is_rgb.setter
    def is_rgb(self, value: bool):
        self._is_rgb = value

    def get_properties(self) -> List[ObserverPropertiesItem]:
        """Build the property items, each name-prefixed with 'Projections.'.

        A collapsible header is prepended so the group can be folded in the UI.
        """
        properties = [self._prop_builder.auto("RGB",
                                              type(self).is_rgb)
                      ] + self.tensor_view_projection.get_properties()

        # Plain string literal; the original used an f-string with no placeholders.
        header_name = 'Projections'
        for prop in properties:
            prop.name = f"{header_name}.{prop.name}"

        return [
            self._prop_builder.collapsible_header(header_name, True),
            *properties
        ]
# Example 2
class ClusterObserver(ClusterObservable):
    """Observer visualizing cluster centers projected into 2D/3D.

    Two projection back-ends are available: PCA and a force-directed
    simulation (FD_SIM).  Optional overlays (cluster datapoints, spring
    lines, spline arrows) can be toggled from the property panel; the
    overlays that depend on a temporal pooler are only enabled when the
    tensor provider reports one.
    """
    _sequences_builder: SequencesBuilder
    _show_cluster_centers: bool
    _show_cluster_datapoints: bool
    _show_spring_lines: bool
    _show_spline_arrows: bool
    _projection_type: ClusterObserverProjection
    _prop_builder: ObserverPropertiesBuilder
    _n_cluster_centers: int
    _n_sequences: int
    _sequence_length: int

    # Canvas size in pixels, adjustable via the 'Canvas' property section.
    _width: int = 640
    _height: int = 480
    _has_temporal_pooler: bool

    def __init__(self, tensor_provider: TensorProvider):
        """Cache static dimensions and create the per-feature data builders."""
        self._has_temporal_pooler = tensor_provider.has_temporal_pooler()

        self._n_cluster_centers = tensor_provider.n_cluster_centers()
        self._n_sequences = tensor_provider.n_sequences()
        self._sequence_length = tensor_provider.sequence_length()

        self.cluster_centers = ClusterCentersDataBuilder(tensor_provider)
        self.fdsim = FDsimDataBuilder(tensor_provider)
        self.n_dims = 2
        self.pca = PcaDataBuilder(tensor_provider)
        self.spring_lines = SpringLinesBuilder(tensor_provider)
        self.spline_arrows = SplineArrowsBuilder(tensor_provider)
        # NOTE(review): sibling classes pass `self` to ObserverPropertiesBuilder;
        # here it is constructed without arguments - confirm this is intended.
        self._prop_builder = ObserverPropertiesBuilder()
        self._sequences_builder = SequencesBuilder(tensor_provider)
        self._show_cluster_centers = True
        self._show_cluster_datapoints = True
        # Temporal-pooler-dependent overlays default to on only when available.
        self._show_spring_lines = self._has_temporal_pooler
        self._show_spline_arrows = self._has_temporal_pooler
        self._projection_type = ClusterObserverProjection.PCA
        # self._pca_transformer = PcaTransformer()

    def get_data(self) -> ClusterObserverData:
        """Collect one data snapshot from all builders.

        Disabled overlays are passed as None; PCA data is only computed
        when PCA is the active projection.  fdsim and sequences data are
        always collected.
        """
        # if self._projection_type == ClusterObserverProjection.PCA:
        #     self.pca.update_pca_transformer(self._pca_transformer)

        return ClusterObserverData(
            cluster_centers=self.cluster_centers.get_data()
            if self._show_cluster_centers else None,
            fdsim=self.fdsim.get_data(),
            n_dims=self.n_dims,
            n_cluster_centers=self._n_cluster_centers,
            n_sequences=self._n_sequences,
            sequence_length=self._sequence_length,
            pca=self.pca.get_data(self.n_dims, self._show_cluster_datapoints)
            if self._projection_type == ClusterObserverProjection.PCA else
            None,
            projection_type="PCA" if self._projection_type
            == ClusterObserverProjection.PCA else "FDsim",
            width=self._width,
            height=self._height,
            spring_lines=self.spring_lines.get_data()
            if self._show_spring_lines else None,
            sequences=self._sequences_builder.get_data(),
            spline_arrows=self.spline_arrows.get_data()
            if self._show_spline_arrows else None,
        )

    def get_properties(self) -> List[ObserverPropertiesItem]:
        """Build the property panel: projection selection, overlay toggles,
        per-feature sections and canvas size.

        The items are yielded in display order by the inner generator.
        """
        def update_projection_dim(value):
            # Select index 0 -> 2D, anything else -> 3D.
            if int(value) == 0:
                self.n_dims = 2
            else:
                self.n_dims = 3
            return value

        def update_show_cluster_centers(value: bool) -> bool:
            self._show_cluster_centers = value
            return value

        def update_show_cluster_datapoints(value: bool) -> bool:
            self._show_cluster_datapoints = value
            return value

        def update_show_spring_lines(value: bool) -> bool:
            self._show_spring_lines = value
            return value

        def update_show_spline_arrows(value: bool) -> bool:
            self._show_spline_arrows = value
            return value

        def format_projection_type(value: ClusterObserverProjection) -> int:
            # Maps the enum onto the select-control index (PCA=0, FD_SIM=1).
            if value == ClusterObserverProjection.PCA:
                return 0
            elif value == ClusterObserverProjection.FD_SIM:
                return 1
            else:
                raise IllegalArgumentException(
                    f'Unrecognized projection {value}')

        def update_projection_type(value):
            # Inverse of format_projection_type; resets PCA state when
            # switching back to PCA so stale transforms are discarded.
            old_type = self._projection_type
            if int(value) == 0:
                self._projection_type = ClusterObserverProjection.PCA
            elif int(value) == 1:
                self._projection_type = ClusterObserverProjection.FD_SIM
            else:
                raise IllegalArgumentException(
                    f'Unrecognized projection {value}')

            if self._projection_type == ClusterObserverProjection.PCA and old_type != ClusterObserverProjection.PCA:
                self.pca.reset()

            return value

        def reset_projection(value):
            # 'Reset' button handler - resets whichever projection is active.
            if self._projection_type == ClusterObserverProjection.PCA:
                self.pca.reset()
            elif self._projection_type == ClusterObserverProjection.FD_SIM:
                self.fdsim.reset()
            else:
                raise IllegalArgumentException(
                    f'Unrecognized projection {value}')

        def update_width(value):
            self._width = int(value)
            return value

        def update_height(value):
            self._height = int(value)
            return value

        def yield_props():
            # Projection selection is read-only without a temporal pooler
            # (only PCA makes sense then).
            yield ObserverPropertiesItem(
                'Projection',
                'select',
                format_projection_type(self._projection_type),
                update_projection_type,
                select_values=[
                    ObserverPropertiesItemSelectValueItem('PCA'),
                    ObserverPropertiesItemSelectValueItem('Force simulation')
                ],
                state=ObserverPropertiesItemState.ENABLED
                if self._has_temporal_pooler else
                ObserverPropertiesItemState.READ_ONLY)

            yield ObserverPropertiesItem(
                'Projection dimensionality',
                'select',
                0 if self.n_dims == 2 else 1,
                update_projection_dim,
                select_values=[
                    ObserverPropertiesItemSelectValueItem('2D'),
                    ObserverPropertiesItemSelectValueItem('3D')
                ])

            yield ObserverPropertiesItem('Reset Projection', 'button', "Reset",
                                         reset_projection)

            # Enablers
            yield self._prop_builder.checkbox('Show Cluster Centers',
                                              self._show_cluster_centers,
                                              update_show_cluster_centers)
            # Datapoints are only rendered by the PCA projection, so the
            # checkbox is disabled (and shown unchecked) under FD_SIM.
            yield self._prop_builder.checkbox(
                'Show Cluster Datapoints',
                self._show_cluster_datapoints if self._projection_type
                == ClusterObserverProjection.PCA else False,
                update_show_cluster_datapoints,
                state=ObserverPropertiesItemState.ENABLED
                if self._projection_type == ClusterObserverProjection.PCA else
                ObserverPropertiesItemState.DISABLED)
            yield self._prop_builder.checkbox(
                'Show Spring Lines',
                self._show_spring_lines
                if self._has_temporal_pooler else False,
                update_show_spring_lines,
                state=ObserverPropertiesItemState.ENABLED
                if self._has_temporal_pooler else
                ObserverPropertiesItemState.DISABLED)
            yield self._prop_builder.checkbox(
                'Show Spline Arrows',
                self._show_spline_arrows
                if self._has_temporal_pooler else False,
                update_show_spline_arrows,
                state=ObserverPropertiesItemState.ENABLED
                if self._has_temporal_pooler else
                ObserverPropertiesItemState.DISABLED)

            # Cluster Centers
            yield self._prop_builder.collapsible_header(
                'Cluster Centers', default_is_expanded=True)
            yield from self.cluster_centers.get_properties(
                enabled=self._show_cluster_centers)

            # Spline Arrows
            yield self._prop_builder.collapsible_header(
                'Spline Arrows', default_is_expanded=True)
            yield from self.spline_arrows.get_properties(
                enabled=self._show_spline_arrows)

            # Canvas
            yield self._prop_builder.collapsible_header(
                'Canvas', default_is_expanded=True)
            yield ObserverPropertiesItem('Width', 'number', self._width,
                                         update_width)
            yield ObserverPropertiesItem('Height', 'number', self._height,
                                         update_height)

            # Force Simulation
            if self._has_temporal_pooler:
                yield ObserverPropertiesItem('Force simulation',
                                             'collapsible_header', True,
                                             lambda _: "True")
                yield from self.fdsim.get_properties()

        return list(yield_props())
# Example 3
class HierarchicalObserver(Observable):
    """Observer showing inverse projections of one expert, grouped by cluster.

    Each group of projections has its own :class:`HierarchicalGroupProperties`.
    Because persisted property values are loaded before the node unit knows
    how many groups exist, a fixed-size pool of placeholder ("default")
    properties is created up front and handed out as groups appear.
    """
    # minimum observer size in pixels, used for automatic rescaling of observers which are too small

    # used to hack persistence
    _groups_max_count: int = 10
    _default_properties: Dict[int, HierarchicalGroupProperties]

    _properties: Dict[int, HierarchicalGroupProperties]
    # Set to None until get_data() first runs; the annotation describes the
    # populated state.
    _grouped_projections: List[List[torch.Tensor]]
    prop_builder: ObserverPropertiesBuilder
    _items_per_row: int = 1
    _groups_stacking: HierarchicalObservableGroupsStacking = HierarchicalObservableGroupsStacking.HORIZONTAL
    minimal_group_size: int = 10

    def __init__(self, node: HierarchicalObservableNode, expert_no: int):
        """Observe expert `expert_no` of `node`."""
        super().__init__()
        self._node = node
        self._expert_no = expert_no

        self._properties = {}

        self._grouped_projections = None
        self.prop_builder = ObserverPropertiesBuilder()

        # TODO HACK - persisted values are loaded prior to the node unit initialization which determines the number
        # of groups
        # properties not initialized - create dummy properties just to fix persistence
        self._default_properties = {
            i: HierarchicalGroupProperties(i, self)
            for i in range(self._groups_max_count)
        }

    def get_data(self) -> HierarchicalObservableData:
        """Compute inverse projections and render each group's images.

        Groups beyond `_groups_max_count` still work but their property
        values are not persisted (a warning is logged).
        """
        self._grouped_projections = grouped_projections = get_inverse_projections_for_all_clusters(
            self._node, self._expert_no)

        # Ensure a properties object exists for every discovered group.
        for i in range(len(grouped_projections)):
            if i not in self._properties:
                if i < len(self._default_properties):
                    # New group - load default properties (with loaded data from persistence storage)
                    self._properties[i] = self._default_properties[i]
                else:
                    logger.warning(
                        f'Hierarchical observer {self._node.name_with_id}.expert_{self._expert_no}: Too '
                        f'many groups found, values will not be persisted. Increase self._groups_max_count.'
                    )
                    self._properties[i] = HierarchicalGroupProperties(i, self)

        image_groups = []
        params_groups = []
        for i, projection_group in enumerate(grouped_projections):
            group_properties = self._properties[i]

            group_images = []
            group_projection_params = None
            for projection in projection_group:
                tensor, projection_params = group_properties.project_and_scale(
                    projection)
                group_images.append(tensor)

                # These are not appended, they are all the same.
                group_projection_params = projection_params

            image_groups.append(group_images)
            params_groups.append(
                HierarchicalObservableParams(
                    scale=group_properties.scale,
                    projection=group_projection_params))

        return HierarchicalObservableData(self._groups_stacking,
                                          self._items_per_row, image_groups,
                                          params_groups)

    def get_properties(self) -> List[ObserverPropertiesItem]:
        """Global layout properties followed by per-group properties."""
        def update_items_per_row(value: int):
            self._items_per_row = value

        def update_minimal_group_size(value: int):
            self.minimal_group_size = value

        def update_groups_stacking(value):
            self._groups_stacking = value

        properties = [
            self.prop_builder.collapsible_header('Global', True),
            self.prop_builder.select('Global.Groups stacking',
                                     self._groups_stacking,
                                     update_groups_stacking,
                                     HierarchicalObservableGroupsStacking),
            self.prop_builder.number_int('Global.Items per row',
                                         self._items_per_row,
                                         update_items_per_row),
            self.prop_builder.number_int('Global.Minimal size',
                                         self.minimal_group_size,
                                         update_minimal_group_size)
        ]

        if len(self._properties) == 0:
            # Hack for property persistence - this branch is visited when the observer system is initialized
            # and persisted values are loaded into the properties - the placeholder properties are needed
            for group_id in self._default_properties:
                properties.extend(
                    self._default_properties[group_id].get_properties())
        else:
            for group_id in self._properties:
                properties.extend(self._properties[group_id].get_properties())

        return properties

    def request_callback(self, request_data: RequestData):
        """Return the raw tensor value under the cursor.

        Expects `request_data.data` to carry 'x', 'y', 'group_idx' and
        'image_idx'.  Responds with {'value': <float or 'NaN'>}; NaN is
        returned when data is not yet available, indices are out of range,
        or the group is rendered as RGB (no single scalar per pixel).
        """
        data = request_data.data
        x = int(data['x'])
        y = int(data['y'])
        group_idx = int(data['group_idx'])
        image_idx = int(data['image_idx'])

        # Each parenthesized clause guards the lookup on the next line.
        lookup_not_possible = (self._grouped_projections is None) or (len(
            self._grouped_projections) < group_idx + 1) or (
                group_idx not in self._properties
            ) or self._properties[group_idx].is_rgb or (len(
                self._grouped_projections[group_idx]) < image_idx + 1)

        if lookup_not_possible:
            value = float('nan')
        else:
            value = self._properties[
                group_idx].tensor_view_projection.value_at(
                    self._grouped_projections[group_idx][image_idx], x, y)

        return {"value": 'NaN' if math.isnan(value) else value}

    def get_callbacks(self) -> ObserverCallbacks:
        """Expose request_callback to the observer system."""
        return ObserverCallbacks().add_request(self.request_callback)
class GradualLearningBasicTopology(Topology):
    """
    Long words utilizing context

    Interesting observers:
      gate:
        * SP Learn Process - Data Batch, sum over dim 1 (zero values means sequence not present in batch)
        * SP cluster centers
        * SP output forward clusters
      specialist:
        * SP_frequent_seqs_reconstruction - symbols reconstruction
        * TP_frequent_context_likelihood - show context per each symbol in learnt sequences(items per row 2)
        * TP_seq_likelihoods_by_cluster
    """
    _n_accuracy_2: AccuracyNode
    _n_accuracy_1: AccuracyNode
    _n_accuracy_single_2: AccuracyNode
    _n_accuracy_single_1: AccuracyNode
    _n_dataset_switch: DatasetSwitchNodeGroup
    _n_specialist: SpecialistNodeGroup
    _prop_builder: ObserverPropertiesBuilder
    _step_count: int = 0
    _active_dataset: int = 0

    def __init__(self, params: GradualLearningBasicTopologyParams = GradualLearningBasicTopologyParams()):
        # Runs on CUDA; the node graph is built immediately.
        super().__init__('cuda')
        self._prop_builder = ObserverPropertiesBuilder(self, source_type=ObserverPropertiesItemSourceType.MODEL)

        self._params = params
        self.create_topology()

    @property
    def params(self):
        return self._params

    def create_topology(self):
        """
                                        +----------------+
            +-------------+             | dataset_switch |
            |             |             +--+-----+-------+
            |             v                |     |
            |  +----------+------------+   |     |
            |  | context_feedback_pass |   |     |
            |  +--------------------+--+   |     |
            |                       |      |     |
            |                       v      v     |
            |               +-------+------+--+  |
            |               | gate_input_join |  |
            |               +-------+---------+  |
            |                       |            |
            |                       v            |
            |              +--------+---------+  |
            |              | gate_input_noise |  |
            |              +--------+---------+  |
            |                       |            |
            |                       v            |
            |                   +---+--+         |
            |                   | gate |         |
            |                   +---+--+         |
            |                       |            |
            |                       v            |
            |               +-------+--------+   +--------+
            |               | format_context |   |        |
            |               +-------+--------+   |        |
            |                       |            v        |
            |                       |     +------+-----+  |
            |                       ---->-+ specialist |  |
            |                             +--+--------++  |
            |                                |        |   |
            +--------------------------------+        v   v
                                                   ++--------++
                                                   | accuracy |
                                                   +----------+
        """

        # Gate: a spatial pooler that classifies (context feedback + sequence id)
        # into one of seq_count clusters.
        n_gate = SpatialPoolerFlockNode(
            ExpertParams(flock_size=self._params.flock_size,
                         n_cluster_centers=self._params.seq_count,
                         spatial=SpatialPoolerParams(
                             # input_size=3,
                             enable_learning=True,
                             buffer_size=self._params.gate_buffer_size,
                             batch_size=100,
                             learning_rate=0.2,
                             learning_period=10,
                             cluster_boost_threshold=100,
                             max_boost_time=200
                         ),
                         ),
            name="Gate"
        )
        self.add_node(n_gate)

        # Specialist
        n_specialist = SpecialistNodeGroup(SpecialistNodeGroupParams(
            flock_size=self._params.flock_size,
            n_symbols=len(self._params.symbols),
            gate_input_context_multiplier=self._params.gate_input_context_multiplier,
            gate_input_context_avg_window_size=self._params.gate_input_context_avg_window_size,
            seq_count=self._params.seq_count,
            convert_context_to_one_hot=self._params.convert_context_to_one_hot
        ))
        self.add_node(n_specialist)
        self._n_specialist = n_specialist

        # Plumbing nodes: feedback pass-through, join of the two gate inputs,
        # small noise on the gate input, and context formatting for the specialist.
        n_context_feedback_pass = PassNode((self._params.flock_size, self._params.seq_count))
        n_gate_input_join = JoinNode(dim=1, n_inputs=2)
        n_gate_input_noise = RandomNoiseNode(RandomNoiseParams(amplitude=0.0001))
        n_format_context = SPFormatContextNodeGroup(self._params.seq_count, self._params.flock_size)

        self.add_node(n_context_feedback_pass)
        self.add_node(n_gate_input_join)
        self.add_node(n_gate_input_noise)
        self.add_node(n_format_context)

        # Dataset
        n_dataset_switch = DatasetSwitchNodeGroup(DatasetSwitchNodeGroupParams(
            dataset_params=DatasetAlphabetNodeGroupParams(
                flock_size=self._params.flock_size,
                symbols=self._params.symbols,
                seq_length=self._params.seq_length,
                seq_count=self._params.seq_count,
                seq_repeat=self._params.seq_repeat
            ),
            flock_split=self._params.flock_split
        ))

        self._n_dataset_switch = n_dataset_switch
        self.add_node(n_dataset_switch)

        # dataset to specialist
        Connector.connect(n_dataset_switch.outputs.output, n_specialist.inputs.input)
        # specialist to gate
        # (backward connection: feedback flows against the main data direction)
        Connector.connect(n_specialist.outputs.context_feedback, n_context_feedback_pass.inputs.input, is_backward=True)
        Connector.connect(n_context_feedback_pass.outputs.output, n_gate_input_join.inputs[0])
        # dataset to gate
        Connector.connect(n_dataset_switch.outputs.sequence_id_one_hot, n_gate_input_join.inputs[1])
        Connector.connect(n_gate_input_join.outputs.output, n_gate_input_noise.inputs.input)
        Connector.connect(n_gate_input_noise.outputs.output, n_gate.inputs.sp.data_input)
        # gate to specialist
        Connector.connect(n_gate.outputs.sp.forward_clusters, n_format_context.inputs.input)
        Connector.connect(n_format_context.outputs.output, n_specialist.inputs.context_input)

        # Measuring accuracy
        # Fork: split dataset/prediction flocks at flock_split so the two parts
        # can be measured independently (outputs[0] = first split, outputs[1] = rest).
        n_fork_dataset = ForkNode(0, [self._params.flock_split, self._params.flock_size - self._params.flock_split])
        n_fork_prediction = ForkNode(0, [self._params.flock_split, self._params.flock_size - self._params.flock_split])
        self.add_node(n_fork_dataset)
        self.add_node(n_fork_prediction)
        Connector.connect(n_dataset_switch.outputs.output, n_fork_dataset.inputs.input)
        Connector.connect(n_specialist.outputs.output, n_fork_prediction.inputs.input)

        # 'single' accuracy nodes average over 1 step; the others average over
        # accuracy_average_steps.
        self._n_accuracy_single_1 = AccuracyNode(1, name='Accuracy single 1')
        self.add_node(self._n_accuracy_single_1)
        Connector.connect(n_fork_dataset.outputs[0], self._n_accuracy_single_1.inputs.input_a)
        Connector.connect(n_fork_prediction.outputs[0], self._n_accuracy_single_1.inputs.input_b)

        self._n_accuracy_single_2 = AccuracyNode(1, name='Accuracy single 2')
        self.add_node(self._n_accuracy_single_2)
        Connector.connect(n_fork_dataset.outputs[1], self._n_accuracy_single_2.inputs.input_a)
        Connector.connect(n_fork_prediction.outputs[1], self._n_accuracy_single_2.inputs.input_b)

        self._n_accuracy_1 = AccuracyNode(self._params.accuracy_average_steps, name='Accuracy 1')
        self.add_node(self._n_accuracy_1)
        Connector.connect(n_fork_dataset.outputs[0], self._n_accuracy_1.inputs.input_a)
        Connector.connect(n_fork_prediction.outputs[0], self._n_accuracy_1.inputs.input_b)

        self._n_accuracy_2 = AccuracyNode(self._params.accuracy_average_steps, name='Accuracy 2')
        self.add_node(self._n_accuracy_2)
        Connector.connect(n_fork_dataset.outputs[1], self._n_accuracy_2.inputs.input_a)
        Connector.connect(n_fork_prediction.outputs[1], self._n_accuracy_2.inputs.input_b)

    def init_sp_clusters(self):
        """Initialize spatial-pooler cluster centers in dataset and specialist."""
        self._n_dataset_switch.init_sp_clusters()
        self._n_specialist.init_sp_clusters()

    def set_sequences_filter(self, dataset_id: int, enabled_sequences: List[bool]):
        """Enable/disable individual sequences of one dataset and log the change."""
        self._n_dataset_switch.set_sequences_filter(dataset_id, enabled_sequences)
        logger.info(f'sequence filter: {enabled_sequences}, step: {self._step_count}')

    @property
    def active_dataset(self) -> int:
        return self._active_dataset

    @active_dataset.setter
    def active_dataset(self, value: int):
        # Switching datasets is observable via the property panel and logged.
        self._active_dataset = value
        self._n_dataset_switch.select_dataset(value)
        logger.info(f'active dataset: {value}, step: {self._step_count}')

    def get_properties(self) -> List[ObserverPropertiesItem]:
        """Append experiment controls to the inherited topology properties."""
        props = super().get_properties()
        return props + [
            self._prop_builder.collapsible_header(f'Experiment', True),
            self._prop_builder.auto("Active dataset", type(self).active_dataset),
            *self._dataset_controll_buttons(0),
            *self._dataset_controll_buttons(1)
        ]

    # NOTE(review): "controll" is a typo; method is private to this class and
    # could be renamed to _dataset_control_buttons in a follow-up.
    def _dataset_controll_buttons(self, dataset_id: int) -> List[ObserverPropertiesItem]:
        """One button per predefined sequence-enable pattern for `dataset_id`.

        Each pattern lists six booleans (sequence on/off flags); the button
        label shows the pattern as a bit string.
        """
        patterns = [
            [False, False, False] * 2,
            [True, False, False] * 2,
            [False, True, False] * 2,
            [False, False, True] * 2,
            [True, True, False] * 2,
            [False, True, True] * 2,
            [True, False, True] * 2,
            [True, True, True] * 2,
            [True, True, True, True, True, False],
        ]

        def format_pattern(pattern: List[bool]) -> str:
            return "".join(['1' if p else '0' for p in pattern])

        return [
            self._prop_builder.button(f'Dataset {dataset_id} - {format_pattern(p)}',
                                      partial(self.set_sequences_filter, dataset_id, p))
            for p in patterns
        ]

    # Accuracy accessors: '1'/'2' refer to the two flock splits produced by the
    # fork nodes; 'single' variants average over one step only.
    def get_accuracy_single_1(self) -> float:
        return self._n_accuracy_single_1.outputs.accuracy.tensor.item()

    def get_accuracy_per_flock_single_1(self) -> List[float]:
        return self._n_accuracy_single_1.outputs.accuracy_per_flock.tensor.tolist()

    def get_accuracy_1(self) -> float:
        return self._n_accuracy_1.outputs.accuracy.tensor.item()

    def get_accuracy_per_flock_1(self) -> List[float]:
        return self._n_accuracy_1.outputs.accuracy_per_flock.tensor.tolist()

    def get_accuracy_single_2(self) -> float:
        return self._n_accuracy_single_2.outputs.accuracy.tensor.item()

    def get_accuracy_per_flock_single_2(self) -> List[float]:
        return self._n_accuracy_single_2.outputs.accuracy_per_flock.tensor.tolist()

    def get_accuracy_2(self) -> float:
        return self._n_accuracy_2.outputs.accuracy.tensor.item()

    def get_accuracy_per_flock_2(self) -> List[float]:
        return self._n_accuracy_2.outputs.accuracy_per_flock.tensor.tolist()

    def get_actual_sequence_ids(self) -> List[int]:
        """Sequence ids currently emitted by the dataset switch."""
        return self._n_dataset_switch.outputs.dataset_2_scalar_sequence_ids.tensor.tolist()

    def step(self):
        """Advance the topology one step and count it."""
        super().step()
        self._step_count += 1
# Example 5
class ObserverView(PropertiesObservable):
    """A node that encompasses all the model's observables and passes them on to the observer system."""
    _strip_observer_name_prefix: str

    _observables: Dict[str, Observable]
    _first_show: bool = True

    def __init__(self,
                 name: str,
                 observer_system: ObserverSystem,
                 strip_observer_name_prefix: str = ''):
        """Create the view and subscribe to window-close notifications.

        Args:
            name: Name under which this view registers itself.
            observer_system: System the observables are registered with.
            strip_observer_name_prefix: Prefix stripped from observer names
                when shown in the property panel.
        """
        # NOTE(review): super().__init__() is not called here - confirm
        # PropertiesObservable requires no base initialization.
        self._strip_observer_name_prefix = strip_observer_name_prefix
        self.name = name
        self._observer_system = observer_system
        self._observables = {}
        observer_system.signals.window_closed.connect(self.on_window_closed)
        self._prop_builder = ObserverPropertiesBuilder(self)

    def _persist(self):
        """Persist the current observer-visibility values under this view's name."""
        self._observer_system.persist_observer_values(self.name, self)

    def on_window_closed(self, observer_name: str):
        """Unregister an observer whose window was closed, then persist."""
        if observer_name in self._observables:
            self._observer_system.unregister_observer(observer_name, False)
            self._persist()

    def close(self):
        """Unregister all observables and finally this view itself."""
        self._unregister_observers()
        self._observer_system.unregister_observer(self.name, True)

    def set_observables(self, observables: Dict[str, Observable]):
        """Replace the observable set; by default none are shown."""
        self._unregister_observers()
        self._observables = observables
        # default is no observers visible
        # self._register_observers()
        if self._first_show:
            # Register the view itself exactly once.
            self._observer_system.register_observer(self.name, self)
            self._first_show = False

    def _register_observers(self):
        """Register every known observable with the observer system."""
        for name, observable in self._observables.items():
            self._observer_system.register_observer(name, observable)

    def _unregister_observers(self):
        """Unregister every known observable (close=True)."""
        for name in self._observables:
            self._observer_system.unregister_observer(name, True)

    def get_properties(self) -> List[ObserverPropertiesItem]:
        """Build 'All'/'None' buttons plus one checkbox per observable,
        grouped under collapsible headers derived from the name's first
        dot-separated segment (after prefix stripping)."""
        def enable_observers_handler(prop_name: str, value: bool):
            # Toggle a single observer. BUG FIX: the log messages previously
            # referenced the enclosing loop variable `name` (late-binding
            # closure), so they always reported the last-iterated observer
            # instead of the one actually toggled.
            if value:
                logger.debug(f"Register observer {prop_name}")
                self._observer_system.register_observer(
                    prop_name, self._observables[prop_name])
            else:
                logger.debug(f"Unregister observer {prop_name}")
                self._observer_system.unregister_observer(prop_name, True)

        def remove_prefix(text: str, prefix: str):
            # Manual equivalent of str.removeprefix (which needs Python 3.9+).
            if text.startswith(prefix):
                return text[len(prefix):]
            else:
                return text

        observers = []
        last_header = ''
        for name, observable in self._observables.items():
            observer_name = remove_prefix(name,
                                          self._strip_observer_name_prefix)
            header = observer_name.split('.')[0]
            observer_name = remove_prefix(observer_name, f'{header}.')
            # Start a new collapsible section whenever the header changes.
            if last_header != header:
                last_header = header
                observers.append(
                    self._prop_builder.collapsible_header(header, False))

            # `partial` binds the current `name` eagerly, avoiding the
            # late-binding pitfall for the handler's first argument.
            observers.append(
                self._prop_builder.checkbox(
                    observer_name,
                    self._observer_system.is_observer_registered(name),
                    partial(enable_observers_handler, name)))

        def set_all():
            self._register_observers()
            self._persist()

        def set_none():
            self._unregister_observers()
            self._persist()

        return [
            self._prop_builder.button('All', set_all),
            self._prop_builder.button('None', set_none),
        ] + observers