Example #1
0
    def _install_experts(self):
        """Build the expert chain and wire it to the SE inputs.

        Flow: image -> unsqueeze -> expert0 -> expert1 -> join(with label)
        -> unsqueeze -> top-level spatial pooler.
        """
        self._top_level_flock_node = SpatialPoolerFlockNode(
            self._create_expert_params())
        self._flock_nodes = [
            ExpertFlockNode(self._create_expert_params()),
            ExpertFlockNode(self._create_expert_params()),
            self._top_level_flock_node,
        ]
        for expert in self._flock_nodes:
            self.add_node(expert)

        image_unsqueeze = UnsqueezeNode(0)
        self.add_node(image_unsqueeze)

        join_unsqueeze = UnsqueezeNode(0)
        self.add_node(join_unsqueeze)

        bottom, middle = self._flock_nodes[0], self._flock_nodes[1]

        # image -> expert chain
        Connector.connect(self.se_io.outputs.image_output,
                          image_unsqueeze.inputs.input)
        Connector.connect(image_unsqueeze.outputs.output,
                          bottom.inputs.sp.data_input)
        Connector.connect(bottom.outputs.tp.projection_outputs,
                          middle.inputs.sp.data_input)
        # expert chain + label -> join -> top-level pooler
        Connector.connect(middle.outputs.tp.projection_outputs,
                          self._join_node.inputs[0])
        Connector.connect(self.se_io.outputs.task_to_agent_label,
                          self._join_node.inputs[1])
        Connector.connect(self._join_node.outputs.output,
                          join_unsqueeze.inputs.input)
        Connector.connect(join_unsqueeze.outputs.output,
                          self._top_level_flock_node.inputs.sp.data_input)
Example #2
0
    def __init__(self,
                 params: DatasetAlphabetNodeGroupParams,
                 name: str = "DatasetAlphabetNodeGroup"):
        """Create a dataset node feeding a non-learning spatial pooler.

        Exposes on the group outputs: the SP forward clusters, the one-hot
        sequence ids and the scalar sequence ids.
        """
        super().__init__(name,
                         inputs=EmptyInputs(self),
                         outputs=DatasetAlphabetNodeGroupOutputs(self))
        self._params = params

        node_dataset = self.create_node_dataset()

        # The SP here only maps symbols to fixed clusters — learning is off.
        sp_params = ExpertParams(
            flock_size=self._params.flock_size,
            n_cluster_centers=len(self._params.symbols),
            spatial=SpatialPoolerParams(enable_learning=False),
        )
        node_sp = SpatialPoolerFlockNode(sp_params, name="SP_dataset")

        # Dataset nodes
        self.add_node(node_dataset)
        self.add_node(node_sp)

        # data -> SP
        Connector.connect(node_dataset.outputs.outputs,
                          node_sp.inputs.sp.data_input)
        # group outputs
        Connector.connect(node_dataset.outputs.sequence_ids_one_hot,
                          self.outputs.sequence_id_one_hot.input)
        Connector.connect(node_sp.outputs.sp.forward_clusters,
                          self.outputs.output.input)
        Connector.connect(node_dataset.outputs.sequence_ids,
                          self.outputs.scalar_sequence_ids.input)

        self._dataset_node = node_dataset
        self._sp_dataset_node = node_sp
    def _install_experts(self):
        """Build the hierarchy: image -> LRF -> conv expert -> mid expert ->
        join with (rescaled) label -> top-level spatial pooler.
        """
        im_width = self.se_io.get_image_width()
        im_height = self.se_io.get_image_height()
        # RGB image tiled into EXPERTS_IN_X x EXPERTS_IN_X receptive fields.
        self.input_dims = torch.Size((im_height, im_width, 3))
        self.parent_rf_dims = Size2D(im_height // self.EXPERTS_IN_X,
                                     im_width // self.EXPERTS_IN_X)

        lrf_node = ReceptiveFieldNode(input_dims=self.input_dims,
                                      parent_rf_dims=self.parent_rf_dims)

        self.add_node(lrf_node)

        self._top_level_flock_node = SpatialPoolerFlockNode(self.parent_params)
        self._mid_node = ExpertFlockNode(self.mid_expert_params)
        self._conv_node = ConvExpertFlockNode(self.conv_flock_params)

        self.add_node(self._top_level_flock_node)
        self.add_node(self._mid_node)
        self.add_node(self._conv_node)

        def rescale(inputs, outputs):
            # While training, amplify the label; otherwise multiply it by NaN
            # (presumably so the top-level expert ignores it during testing —
            # TODO confirm NaN handling downstream).
            if self.TRAINING:
                outputs[0].copy_(
                    inputs[0] *
                    1000)  # large constant to make the label more important
            else:
                outputs[0].copy_(inputs[0] * float('nan'))

        # One input, one output shaped like the label vector.
        self._rescale_node = LambdaNode(rescale, 1,
                                        [(self.se_io.get_num_labels(), )])
        self.add_node(self._rescale_node)

        unsqueeze_node_0 = UnsqueezeNode(0)
        self.add_node(unsqueeze_node_0)
        unsqueeze_node_1 = UnsqueezeNode(0)
        self.add_node(unsqueeze_node_1)

        # image -> LRF -> conv -> unsqueeze -> mid -> join
        Connector.connect(self.se_io.outputs.image_output,
                          lrf_node.inputs.input)

        Connector.connect(lrf_node.outputs.output,
                          self._conv_node.inputs.sp.data_input)
        Connector.connect(self._conv_node.outputs.tp.projection_outputs,
                          unsqueeze_node_0.inputs.input)
        Connector.connect(unsqueeze_node_0.outputs.output,
                          self._mid_node.inputs.sp.data_input)
        Connector.connect(self._mid_node.outputs.tp.projection_outputs,
                          self._join_node.inputs[0])

        # label -> rescale -> join
        Connector.connect(self.se_io.outputs.task_to_agent_label,
                          self._rescale_node.inputs[0])
        Connector.connect(self._rescale_node.outputs[0],
                          self._join_node.inputs[1])
        # join -> unsqueeze -> top-level pooler
        Connector.connect(self._join_node.outputs.output,
                          unsqueeze_node_1.inputs.input)
        Connector.connect(unsqueeze_node_1.outputs.output,
                          self._top_level_flock_node.inputs.sp.data_input)
 def _install_experts(self):
     """Join image with label and feed the result into one SP flock."""
     top_level = SpatialPoolerFlockNode(self._create_expert_params())
     self._top_level_flock_node = top_level
     self.add_node(top_level)

     self.unsqueeze_node = UnsqueezeNode(0)
     self.add_node(self.unsqueeze_node)

     # image + label -> join
     Connector.connect(self.se_io.outputs.image_output,
                       self._join_node.inputs[0])
     Connector.connect(self.se_io.outputs.task_to_agent_label,
                       self._join_node.inputs[1])
     # join -> unsqueeze -> top-level SP
     Connector.connect(self._join_node.outputs[0],
                       self.unsqueeze_node.inputs.input)
     Connector.connect(self.unsqueeze_node.outputs.output,
                       top_level.inputs.sp.data_input)
Example #5
0
    def _install_experts(self):
        """Conv SP -> join(label*1000) -> parent SP, with the parent's
        reconstruction scaled back down by the same constant.
        """
        node_lrf = ReceptiveFieldNode(input_dims=self.input_dims,
                                      parent_rf_dims=self.parent_rf_dims)
        self.add_node(node_lrf)

        self._top_level_flock_node = SpatialPoolerFlockNode(self.parent_params,
                                                            name="Parent 1 SP")
        self._conv_node = ConvSpatialPoolerFlockNode(self.conv_flock_params,
                                                     name="Conv SP flock")
        self.add_node(self._top_level_flock_node)
        self.add_node(self._conv_node)

        label_scale = 1000

        def scale_up(inputs, outputs):
            outputs[0].copy_(inputs[0] * label_scale)

        def scale_down(inputs, outputs):
            outputs[0].copy_(inputs[0] / label_scale)

        self._rescale_up_node = LambdaNode(scale_up, 1, [(20, )],
                                           name="upscale 1000")
        self.add_node(self._rescale_up_node)

        self._rescale_down_node = LambdaNode(
            scale_down, 1,
            [(1, self._top_level_expert_output_size() + 20)],
            name="downscale 1000")
        self.add_node(self._rescale_down_node)

        node_unsqueeze = UnsqueezeNode(0)
        self.add_node(node_unsqueeze)

        # image -> LRF -> conv SP -> join
        Connector.connect(self.se_io.outputs.image_output,
                          node_lrf.inputs.input)
        Connector.connect(node_lrf.outputs.output,
                          self._conv_node.inputs.sp.data_input)
        Connector.connect(self._conv_node.outputs.sp.forward_clusters,
                          self._join_node.inputs[0])
        # label -> upscale -> join
        Connector.connect(self.se_io.outputs.task_to_agent_label,
                          self._rescale_up_node.inputs[0])
        Connector.connect(self._rescale_up_node.outputs[0],
                          self._join_node.inputs[1])
        # join -> unsqueeze -> parent SP
        Connector.connect(self._join_node.outputs.output,
                          node_unsqueeze.inputs.input)
        Connector.connect(node_unsqueeze.outputs.output,
                          self._top_level_flock_node.inputs.sp.data_input)
        # reconstruction -> downscale
        Connector.connect(
            self._top_level_flock_node.outputs.sp.current_reconstructed_input,
            self._rescale_down_node.inputs[0])
Example #6
0
    def __init__(self,
                 dataset_seed: int = 123,
                 model_seed: int = 321,
                 baseline_seed: int = 333,
                 num_cc: int = 10,
                 batch_s: int = 300,
                 cbt: int = 1000,
                 lr=0.1,
                 examples_per_cl: int = None,
                 mbt: int = 1000):
        """MNIST -> LRF -> SP topology plus a random-number baseline node."""
        super().__init__("cuda")

        flock_size = 1  # TODO flock_size > 1 not supported by the adapter yet

        # Parameters.
        self._sp_params = MnistSpTopology.get_sp_params(
            num_cluster_centers=num_cc,
            cluster_boost_threshold=cbt,
            learning_rate=lr,
            buffer_size=2 * batch_s,
            batch_size=batch_s,
            input_size=28 * 28,
            flock_size=flock_size,
            max_boost_time=mbt)

        self.output_dimension = flock_size * num_cc

        mnist_params = MnistSpTopology.get_mnist_params(examples_per_cl)
        flock_input_size, flock_output_size = compute_lrf_params(
            28, 28, 1, eoy=1, eox=1)

        # Nodes.
        self.node_sp = SpatialPoolerFlockNode(self._sp_params.clone(),
                                              seed=model_seed)
        self._lrf_node = ReceptiveFieldNode(flock_input_size,
                                            flock_output_size)
        self.node_mnist = DatasetMNISTNode(params=mnist_params,
                                           seed=dataset_seed)
        self.node_random = RandomNumberNode(upper_bound=self.output_dimension,
                                            seed=baseline_seed)

        for node in (self.node_mnist, self.node_sp, self._lrf_node,
                     self.node_random):
            self.add_node(node)

        # MNIST -> LRF -> SP
        Connector.connect(self.node_mnist.outputs.data,
                          self._lrf_node.inputs[0])
        Connector.connect(self._lrf_node.outputs[0],
                          self.node_sp.inputs.sp.data_input)
    def _install_experts(self, flock_params: List[ExpertParams],
                         model_seed: int):
        """Install conv expert -> expert -> join(label) -> top-level SP.

        Args:
            flock_params: params for the three experts, bottom-up.
            model_seed: seed shared by all three expert nodes.
        """
        self._flock_nodes = [
            ConvExpertFlockNode(flock_params[0], seed=model_seed),
            ExpertFlockNode(flock_params[1], seed=model_seed),
            SpatialPoolerFlockNode(flock_params[2], seed=model_seed),
        ]
        self._top_level_flock_node = self._flock_nodes[-1]
        for expert in self._flock_nodes:
            self.add_node(expert)

        # local receptive fields
        self._install_lrf(self._image_size.value, self._experts_on_x)

        # join output of expert and label
        self._join_node = JoinNode(flatten=True)
        self.add_node(self._join_node)

        # scale node
        self._install_rescale(self._label_scale)

        unsqueeze_conv = UnsqueezeNode(0)
        self.add_node(unsqueeze_conv)

        unsqueeze_join = UnsqueezeNode(0)
        self.add_node(unsqueeze_join)

        conv_expert, mid_expert = self._flock_nodes[0], self._flock_nodes[1]

        # image -> LRF -> E1 -> E2 -> join
        Connector.connect(self.se_io.outputs.image_output,
                          self._node_lrf.inputs.input)
        Connector.connect(self._node_lrf.outputs.output,
                          conv_expert.inputs.sp.data_input)
        Connector.connect(conv_expert.outputs.tp.projection_outputs,
                          unsqueeze_conv.inputs.input)
        Connector.connect(unsqueeze_conv.outputs.output,
                          mid_expert.inputs.sp.data_input)
        Connector.connect(mid_expert.outputs.tp.projection_outputs,
                          self._join_node.inputs[0])

        # label -> rescale --------> join
        Connector.connect(self.se_io.outputs.task_to_agent_label,
                          self._rescale_node.inputs[0])
        Connector.connect(self._rescale_node.outputs[0],
                          self._join_node.inputs[1])

        # join -> top_level_expert
        Connector.connect(self._join_node.outputs.output,
                          unsqueeze_join.inputs.input)
        Connector.connect(unsqueeze_join.outputs.output,
                          self._top_level_flock_node.inputs.sp.data_input)
 def _make_middle_layer_expert(use_temporal_pooler: bool, n_middle_layer_cluster_centers: int) \
         -> (NodeBase, InputSlot, OutputSlotBase):
     """Create a single middle-layer expert.

     Returns the node together with its data-input slot and its output slot
     (TP projection outputs when the temporal pooler is used, SP forward
     clusters otherwise).
     """
     params = ExpertParams()
     params.flock_size = 1
     params.n_cluster_centers = n_middle_layer_cluster_centers

     if not use_temporal_pooler:
         sp_node = SpatialPoolerFlockNode(params)
         return (sp_node,
                 sp_node.inputs.sp.data_input,
                 sp_node.outputs.sp.forward_clusters)

     expert_node = ExpertFlockNode(params)
     return (expert_node,
             expert_node.inputs.sp.data_input,
             expert_node.outputs.tp.projection_outputs)
Example #9
0
    def _install_experts(self, flock_params: List[ExpertParams],
                         model_seed: int):
        """Install expert -> expert -> join(label) -> top-level SP.

        Args:
            flock_params: params for the three experts, bottom-up.
            model_seed: seed shared by all three expert nodes.
        """
        self.flock_nodes = [
            ExpertFlockNode(flock_params[0], seed=model_seed),
            ExpertFlockNode(flock_params[1], seed=model_seed),
            SpatialPoolerFlockNode(flock_params[2], seed=model_seed),
        ]
        self._top_level_flock_node = self.flock_nodes[-1]
        for expert in self.flock_nodes:
            self.add_node(expert)

        self._join_node = JoinNode(flatten=True)
        self.add_node(self._join_node)

        self._install_rescale(self._label_scale)

        unsqueeze_image = UnsqueezeNode(0)
        self.add_node(unsqueeze_image)

        first, second = self.flock_nodes[0], self.flock_nodes[1]

        # image -> unsqueeze -> E1 -> E2 -> join
        Connector.connect(self.se_io.outputs.image_output,
                          unsqueeze_image.inputs.input)
        Connector.connect(unsqueeze_image.outputs.output,
                          first.inputs.sp.data_input)
        Connector.connect(first.outputs.tp.projection_outputs,
                          second.inputs.sp.data_input)
        Connector.connect(second.outputs.tp.projection_outputs,
                          self._join_node.inputs[0])

        # label -> rescale ----> join
        Connector.connect(self.se_io.outputs.task_to_agent_label,
                          self._rescale_node.inputs[0])
        Connector.connect(self._rescale_node.outputs[0],
                          self._join_node.inputs[1])

        unsqueeze_join = UnsqueezeNode(0)
        self.add_node(unsqueeze_join)

        # join -> unsqueeze -> top_level_expert
        Connector.connect(self._join_node.outputs.output,
                          unsqueeze_join.inputs.input)
        Connector.connect(unsqueeze_join.outputs.output,
                          self._top_level_flock_node.inputs.sp.data_input)
    def __init__(self,
                 input_data_size: int,
                 labels_size: int,
                 sp_params: Optional[ExpertParams] = None,
                 name: str = "",
                 seed: Optional[int] = None):
        """join(data, label) -> unsqueeze -> SP -> fork; the fork's second
        slice (the label part of the reconstruction) feeds the label output.

        Args:
            input_data_size: length of the data part of the joined vector.
            labels_size: length of the label part of the joined vector.
            sp_params: expert params for the SP node; defaults when None.
            name: prefix for the child node names.
            seed: seed for the SP node.
        """
        super().__init__("SpReconstructionLayer",
                         inputs=ClassificationInputs(self),
                         outputs=ClassificationOutputs(self))

        self.join_node = JoinNode(n_inputs=2, flatten=True,
                                  name=name + " Join")
        self.add_node(self.join_node)
        Connector.connect(self.inputs.data.output, self.join_node.inputs[0])
        Connector.connect(self.inputs.label.output, self.join_node.inputs[1])

        node_unsqueeze = UnsqueezeNode(0)
        self.add_node(node_unsqueeze)
        Connector.connect(self.join_node.outputs.output,
                          node_unsqueeze.inputs.input)

        self.sp_node = SpatialPoolerFlockNode(
            ExpertParams() if sp_params is None else sp_params,
            name=name + " SP Expert",
            seed=seed)
        self.add_node(self.sp_node)
        Connector.connect(node_unsqueeze.outputs.output,
                          self.sp_node.inputs.sp.data_input)

        self.fork_node = ForkNode(1, [input_data_size, labels_size],
                                  name=name + " Fork")
        self.add_node(self.fork_node)
        Connector.connect(self.sp_node.outputs.sp.current_reconstructed_input,
                          self.fork_node.inputs.input)

        Connector.connect(self.fork_node.outputs[1], self.outputs.label.input)
    def create_topology(self):
        """Build the gated specialist topology shown in the diagram below.

                                        +----------------+
            +-------------+             | dataset_switch |
            |             |             +--+-----+-------+
            |             |             +--+-----+-------+
            |             v                |     |
            |  +----------+------------+   |     |
            |  | context_feedback_pass |   |     |
            |  +--------------------+--+   |     |
            |                       |      |     |
            |                       v      v     |
            |               +-------+------+--+  |
            |               | gate_input_join |  |
            |               +-------+---------+  |
            |                       |            |
            |                       v            |
            |              +--------+---------+  |
            |              | gate_input_noise |  |
            |              +--------+---------+  |
            |                       |            |
            |                       v            |
            |                   +---+--+         |
            |                   | gate |         |
            |                   +---+--+         |
            |                       |            |
            |                       v            |
            |               +-------+--------+   +--------+
            |               | format_context |   |        |
            |               +-------+--------+   |        |
            |                       |            v        |
            |                       |     +------+-----+  |
            |                       ---->-+ specialist |  |
            |                             +--+--------++  |
            |                                |        |   |
            +--------------------------------+        v   v
                                                   ++--------++
                                                   | accuracy |
                                                   +----------+
        """

        # Gate: SP flock that picks the context fed to the specialist.
        n_gate = SpatialPoolerFlockNode(
            ExpertParams(flock_size=self._params.flock_size,
                         n_cluster_centers=self._params.seq_count,
                         spatial=SpatialPoolerParams(
                             # input_size=3,
                             enable_learning=True,
                             buffer_size=self._params.gate_buffer_size,
                             batch_size=100,
                             learning_rate=0.2,
                             learning_period=10,
                             cluster_boost_threshold=100,
                             max_boost_time=200
                         ),
                         ),
            name="Gate"
        )
        self.add_node(n_gate)

        # Specialist
        n_specialist = SpecialistNodeGroup(SpecialistNodeGroupParams(
            flock_size=self._params.flock_size,
            n_symbols=len(self._params.symbols),
            gate_input_context_multiplier=self._params.gate_input_context_multiplier,
            gate_input_context_avg_window_size=self._params.gate_input_context_avg_window_size,
            seq_count=self._params.seq_count,
            convert_context_to_one_hot=self._params.convert_context_to_one_hot
        ))
        self.add_node(n_specialist)
        self._n_specialist = n_specialist

        # Helper nodes feeding the gate.
        n_context_feedback_pass = PassNode((self._params.flock_size, self._params.seq_count))
        n_gate_input_join = JoinNode(dim=1, n_inputs=2)
        # Tiny noise on the gate input.
        n_gate_input_noise = RandomNoiseNode(RandomNoiseParams(amplitude=0.0001))
        n_format_context = SPFormatContextNodeGroup(self._params.seq_count, self._params.flock_size)

        self.add_node(n_context_feedback_pass)
        self.add_node(n_gate_input_join)
        self.add_node(n_gate_input_noise)
        self.add_node(n_format_context)

        # Dataset
        n_dataset_switch = DatasetSwitchNodeGroup(DatasetSwitchNodeGroupParams(
            dataset_params=DatasetAlphabetNodeGroupParams(
                flock_size=self._params.flock_size,
                symbols=self._params.symbols,
                seq_length=self._params.seq_length,
                seq_count=self._params.seq_count,
                seq_repeat=self._params.seq_repeat
            ),
            flock_split=self._params.flock_split
        ))

        self._n_dataset_switch = n_dataset_switch
        self.add_node(n_dataset_switch)

        # dataset to specialist
        Connector.connect(n_dataset_switch.outputs.output, n_specialist.inputs.input)
        # specialist to gate (feedback edge, hence is_backward=True)
        Connector.connect(n_specialist.outputs.context_feedback, n_context_feedback_pass.inputs.input, is_backward=True)
        Connector.connect(n_context_feedback_pass.outputs.output, n_gate_input_join.inputs[0])
        # dataset to gate
        Connector.connect(n_dataset_switch.outputs.sequence_id_one_hot, n_gate_input_join.inputs[1])
        Connector.connect(n_gate_input_join.outputs.output, n_gate_input_noise.inputs.input)
        Connector.connect(n_gate_input_noise.outputs.output, n_gate.inputs.sp.data_input)
        # gate to specialist
        Connector.connect(n_gate.outputs.sp.forward_clusters, n_format_context.inputs.input)
        Connector.connect(n_format_context.outputs.output, n_specialist.inputs.context_input)

        # Measuring accuracy
        # Fork both the dataset output and the prediction into the two flock
        # splits so each split gets its own accuracy measurement.
        n_fork_dataset = ForkNode(0, [self._params.flock_split, self._params.flock_size - self._params.flock_split])
        n_fork_prediction = ForkNode(0, [self._params.flock_split, self._params.flock_size - self._params.flock_split])
        self.add_node(n_fork_dataset)
        self.add_node(n_fork_prediction)
        Connector.connect(n_dataset_switch.outputs.output, n_fork_dataset.inputs.input)
        Connector.connect(n_specialist.outputs.output, n_fork_prediction.inputs.input)

        # Single-step accuracy per split.
        self._n_accuracy_single_1 = AccuracyNode(1, name='Accuracy single 1')
        self.add_node(self._n_accuracy_single_1)
        Connector.connect(n_fork_dataset.outputs[0], self._n_accuracy_single_1.inputs.input_a)
        Connector.connect(n_fork_prediction.outputs[0], self._n_accuracy_single_1.inputs.input_b)

        self._n_accuracy_single_2 = AccuracyNode(1, name='Accuracy single 2')
        self.add_node(self._n_accuracy_single_2)
        Connector.connect(n_fork_dataset.outputs[1], self._n_accuracy_single_2.inputs.input_a)
        Connector.connect(n_fork_prediction.outputs[1], self._n_accuracy_single_2.inputs.input_b)

        # Windowed accuracy per split (averaged over accuracy_average_steps).
        self._n_accuracy_1 = AccuracyNode(self._params.accuracy_average_steps, name='Accuracy 1')
        self.add_node(self._n_accuracy_1)
        Connector.connect(n_fork_dataset.outputs[0], self._n_accuracy_1.inputs.input_a)
        Connector.connect(n_fork_prediction.outputs[0], self._n_accuracy_1.inputs.input_b)

        self._n_accuracy_2 = AccuracyNode(self._params.accuracy_average_steps, name='Accuracy 2')
        self.add_node(self._n_accuracy_2)
        Connector.connect(n_fork_dataset.outputs[1], self._n_accuracy_2.inputs.input_a)
        Connector.connect(n_fork_prediction.outputs[1], self._n_accuracy_2.inputs.input_b)
Example #12
0
def test_sp_flock_node_accessor_types_and_dimensions():
    """Step a random -> unsqueeze -> SP chain once; sanity-check accessors."""
    device = 'cuda'  # CPU not supported by SPFlock

    upper_bound = 107
    flock_input_size = upper_bound
    flock_size = 1
    num_cc = 21

    sp_params = MnistSpTopology.get_sp_params(num_cluster_centers=num_cc,
                                              cluster_boost_threshold=1000,
                                              learning_rate=0.1,
                                              buffer_size=2 * 30,
                                              batch_size=30,
                                              input_size=flock_input_size,
                                              flock_size=flock_size,
                                              max_boost_time=1500)

    # node_random -> node_unsqueeze -> node_sp
    node_random = RandomNumberNode(upper_bound=upper_bound)
    node_unsqueeze = UnsqueezeNode(0)
    node_sp = SpatialPoolerFlockNode(sp_params.clone())

    Connector.connect(node_random.outputs.one_hot_output,
                      node_unsqueeze.inputs.input)
    Connector.connect(node_unsqueeze.outputs.output,
                      node_sp.inputs.sp.data_input)

    chain = (node_random, node_unsqueeze, node_sp)

    # allocate memory blocks, then make one step
    creator = AllocatingCreator(device=device)
    for node in chain:
        node.allocate_memory_blocks(creator)
    for node in chain:
        node.step()

    # collect the results
    reconstruction = SpatialPoolerFlockNodeAccessor.get_reconstruction(node_sp)
    deltas = SpatialPoolerFlockNodeAccessor.get_sp_deltas(node_sp)
    boosting_durations = \
        SpatialPoolerFlockNodeAccessor.get_sp_boosting_durations(node_sp)
    output_id = SpatialPoolerFlockNodeAccessor.get_output_id(node_sp)

    # check result types
    assert type(reconstruction) is torch.Tensor
    assert type(deltas) is torch.Tensor
    assert type(boosting_durations) is torch.Tensor
    assert type(output_id) is int

    # check result shapes / ranges
    assert reconstruction.shape == (flock_size, flock_input_size)
    assert deltas.shape == (flock_size, num_cc, flock_input_size)
    assert boosting_durations.shape == (flock_size, num_cc)
    assert 0 <= output_id < num_cc

    # test the sp metrics
    delta = average_sp_delta(deltas)
    boosting_dur = average_boosting_duration(boosting_durations)
    nbc = num_boosted_clusters(boosting_durations)

    assert type(delta) is float
    assert type(boosting_dur) is float
    assert 0 <= boosting_dur <= 1000

    assert type(nbc) is float
    assert 0 <= nbc <= 1
Example #13
0
    def __init__(self,
                 expert_width: int = 1,
                 input_square_side: int = 64,
                 n_cluster_centers: int = 8,
                 stride: int = None,
                 training_phase_steps: int = 200,
                 testing_phase_steps: int = 800,
                 seed: int = 0):
        """Build SE-navigation -> LRF -> SP topology with a backup SP node.

        Args:
            expert_width: side of one expert's receptive field, in pixels.
            input_square_side: side of the square input image, in pixels.
            n_cluster_centers: cluster centers per expert.
            stride: receptive-field stride; defaults to expert_width
                (non-overlapping fields).
            training_phase_steps: steps in a training phase.
            testing_phase_steps: steps in a testing phase.
            seed: seed for both spatial pooler nodes (dataset uses seed=0).
        """
        super().__init__(device='cuda')

        if stride is None:
            stride = expert_width

        self.training_phase_steps = training_phase_steps
        self.testing_phase_steps = testing_phase_steps
        self.testing_phase = True
        self.n_testing_phase = -1
        self.training_step = -1
        self._step_counter = 0
        # Receptive fields must tile the input exactly.
        assert (input_square_side - (expert_width - stride)) % stride == 0, \
            f'(input_square_side - (expert_width - stride)) ' \
                f'({(input_square_side - (expert_width - stride))}) must be divisible' \
                f' by stride ({stride})'
        self.n_experts_width = (input_square_side -
                                (expert_width - stride)) // stride
        self.input_square_side = input_square_side
        self.one_expert_lrf_width = expert_width

        # One SP expert per receptive field; each sees an RGB patch.
        self.sp_params = ExpertParams()
        self.sp_params.n_cluster_centers = n_cluster_centers
        self.sp_params.spatial.input_size = self.one_expert_lrf_width * self.one_expert_lrf_width * 3
        self.sp_params.flock_size = int(self.n_experts_width**2)
        self.sp_params.spatial.buffer_size = 510
        self.sp_params.spatial.batch_size = 500
        self.sp_params.compute_reconstruction = True

        # Buffers for the reconstructed image and its difference to the input.
        self.reconstructed_data = torch.zeros(
            (self.input_square_side, self.input_square_side, 3),
            dtype=self.float_dtype,
            device=self.device)
        self.image_difference = torch.zeros(
            (self.input_square_side, self.input_square_side, 3),
            dtype=self.float_dtype,
            device=self.device)

        se_nav_params = DatasetSENavigationParams(
            SeDatasetSize.SIZE_64,
            sampling_method=SamplingMethod.RANDOM_SAMPLING)

        self._node_se_nav = DatasetSeNavigationNode(se_nav_params, seed=0)

        parent_rf_dims = Size2D(self.one_expert_lrf_width,
                                self.one_expert_lrf_width)
        self._node_lrf = ReceptiveFieldNode(
            (input_square_side, input_square_side, 3), parent_rf_dims,
            Stride(stride, stride))

        # necessary to clone the params, because the GUI changes them during the simulation (restart needed)
        self._node_spatial_pooler = SpatialPoolerFlockNode(
            self.sp_params.clone(), seed=seed)
        self._node_spatial_pooler_backup = SpatialPoolerFlockNode(
            self.sp_params.clone(), seed=seed)

        self.add_node(self._node_se_nav)
        self.add_node(self._node_lrf)
        self.add_node(self._node_spatial_pooler)
        self.add_node(self._node_spatial_pooler_backup)

        # dataset -> LRF -> both poolers (main and backup share the input)
        Connector.connect(self._node_se_nav.outputs.image_output,
                          self._node_lrf.inputs.input)
        Connector.connect(self._node_lrf.outputs.output,
                          self._node_spatial_pooler.inputs.sp.data_input)
        Connector.connect(
            self._node_lrf.outputs.output,
            self._node_spatial_pooler_backup.inputs.sp.data_input)

        self.se_nav_node = self._node_se_nav
Example #14
0
class SeNavLrfTopology(Topology):
    """SE-navigation dataset -> receptive fields -> spatial pooler flock.

    A second ("backup") spatial pooler holds a copy of the weights so the
    topology can alternate between training and testing phases: weights are
    saved to the backup when testing starts and restored when training resumes.
    """

    def __init__(self,
                 expert_width: int = 1,
                 input_square_side: int = 64,
                 n_cluster_centers: int = 8,
                 stride: int = None,
                 training_phase_steps: int = 200,
                 testing_phase_steps: int = 800,
                 seed: int = 0):
        """Build the topology.

        Args:
            expert_width: side of one expert's receptive field, in pixels.
            input_square_side: side of the square input image, in pixels.
            n_cluster_centers: cluster centers per expert.
            stride: receptive-field stride; defaults to expert_width
                (non-overlapping fields).
            training_phase_steps: steps in a training phase.
            testing_phase_steps: steps in a testing phase.
            seed: seed for both spatial pooler nodes (dataset uses seed=0).
        """
        super().__init__(device='cuda')

        if stride is None:
            stride = expert_width

        self.training_phase_steps = training_phase_steps
        self.testing_phase_steps = testing_phase_steps
        self.testing_phase = True
        self.n_testing_phase = -1
        self.training_step = -1
        self._step_counter = 0
        # Receptive fields must tile the input exactly.
        assert (input_square_side - (expert_width - stride)) % stride == 0, \
            f'(input_square_side - (expert_width - stride)) ' \
                f'({(input_square_side - (expert_width - stride))}) must be divisible' \
                f' by stride ({stride})'
        self.n_experts_width = (input_square_side -
                                (expert_width - stride)) // stride
        self.input_square_side = input_square_side
        self.one_expert_lrf_width = expert_width

        # One SP expert per receptive field; each sees an RGB patch.
        self.sp_params = ExpertParams()
        self.sp_params.n_cluster_centers = n_cluster_centers
        self.sp_params.spatial.input_size = self.one_expert_lrf_width * self.one_expert_lrf_width * 3
        self.sp_params.flock_size = int(self.n_experts_width**2)
        self.sp_params.spatial.buffer_size = 510
        self.sp_params.spatial.batch_size = 500
        self.sp_params.compute_reconstruction = True

        # Buffers for the reconstructed image and its difference to the input.
        self.reconstructed_data = torch.zeros(
            (self.input_square_side, self.input_square_side, 3),
            dtype=self.float_dtype,
            device=self.device)
        self.image_difference = torch.zeros(
            (self.input_square_side, self.input_square_side, 3),
            dtype=self.float_dtype,
            device=self.device)

        se_nav_params = DatasetSENavigationParams(
            SeDatasetSize.SIZE_64,
            sampling_method=SamplingMethod.RANDOM_SAMPLING)

        self._node_se_nav = DatasetSeNavigationNode(se_nav_params, seed=0)

        parent_rf_dims = Size2D(self.one_expert_lrf_width,
                                self.one_expert_lrf_width)
        self._node_lrf = ReceptiveFieldNode(
            (input_square_side, input_square_side, 3), parent_rf_dims,
            Stride(stride, stride))

        # necessary to clone the params, because the GUI changes them during the simulation (restart needed)
        self._node_spatial_pooler = SpatialPoolerFlockNode(
            self.sp_params.clone(), seed=seed)
        self._node_spatial_pooler_backup = SpatialPoolerFlockNode(
            self.sp_params.clone(), seed=seed)

        self.add_node(self._node_se_nav)
        self.add_node(self._node_lrf)
        self.add_node(self._node_spatial_pooler)
        self.add_node(self._node_spatial_pooler_backup)

        # dataset -> LRF -> both poolers (main and backup share the input)
        Connector.connect(self._node_se_nav.outputs.image_output,
                          self._node_lrf.inputs.input)
        Connector.connect(self._node_lrf.outputs.output,
                          self._node_spatial_pooler.inputs.sp.data_input)
        Connector.connect(
            self._node_lrf.outputs.output,
            self._node_spatial_pooler_backup.inputs.sp.data_input)

        self.se_nav_node = self._node_se_nav

    def step(self):
        """Advance one step, switching training/testing phases as needed."""
        # First step only: allocate memory blocks, then remove the backup
        # pooler from the stepped graph — it keeps its tensors and is only
        # used via _unit.copy_to in set_training_model/set_testing_model.
        if not self._is_initialized:
            self._assign_ids_to_nodes(self._id_generator)
            self.order_nodes()
            self._update_memory_blocks()
            self.remove_node(self._node_spatial_pooler_backup)
        #     # self.remove_node(self._node_mnist_test)

        # Phase switching driven by the step counter.
        if not self.testing_phase and self._step_counter % self.training_phase_steps == 0:
            self.testing_phase = True
            self.n_testing_phase += 1
            self.set_testing_model()
            self._step_counter = 0
        elif self.testing_phase and self._step_counter % self.testing_phase_steps == 0:
            self.testing_phase = False
            self.set_training_model()
            self._step_counter = 0
        self._step_counter += 1

        if not self.testing_phase:
            self.training_step += 1

        # NOTE(review): ids/order are re-assigned here, presumably because the
        # backup node was removed above — confirm this second pass is intended.
        if not self._is_initialized:
            self._assign_ids_to_nodes(self._id_generator)
            self.order_nodes()
            self._is_initialized = True

        super().step()

        # Project the flock reconstruction back to image space and compute
        # the difference against the current dataset image.
        flock_reconstruction = self._node_spatial_pooler.outputs.sp.current_reconstructed_input
        # noinspection PyProtectedMember
        self.reconstructed_data.copy_(
            self._node_lrf.inverse_projection(flock_reconstruction.tensor))

        self.image_difference.copy_(self.reconstructed_data -
                                    self._node_se_nav.outputs[0].tensor)

    def set_training_model(self):
        """Restore weights from the backup pooler and re-enable learning."""
        # noinspection PyProtectedMember
        self._node_spatial_pooler_backup._unit.copy_to(
            self._node_spatial_pooler._unit)
        self._node_spatial_pooler.switch_learning(True)

    def set_testing_model(self):
        """Save current weights into the backup pooler and disable learning."""
        # noinspection PyProtectedMember
        self._node_spatial_pooler._unit.copy_to(
            self._node_spatial_pooler_backup._unit)
        self._node_spatial_pooler.switch_learning(False)