Exemplo n.º 1
0
 def _distance_between_cluster_and_point(self, weight, sample):
     """
     Calculate the distance between given cluster weight and point sample.

     Per channel: the distance from the sample to the cluster center,
     reduced by the cluster half-size and clipped at zero, so a point
     lying inside the cluster yields 0.
     """
     lower, upper = self._split_weight_nch(weight)
     half_size = l2_norm(np.subtract(upper, lower)) / 2
     center = np.add(lower, upper) / 2
     outside = np.subtract(l2_norm(np.subtract(sample, center)), half_size)
     return np.array(np.maximum(outside, np.zeros(self.num_channel)))
Exemplo n.º 2
0
 def _distance_between_clusters(self, w_i, w_j):
     """
     Calculate the distance between given clusters w_i and w_j.

     Per channel: the distance between the two cluster centers, reduced
     by both clusters' half-sizes and clipped at zero (overlapping
     clusters yield 0).
     """
     lo_i, hi_i = self._split_weight_nch(w_i)
     lo_j, hi_j = self._split_weight_nch(w_j)
     half_i = l2_norm(np.subtract(hi_i, lo_i)) / 2
     half_j = l2_norm(np.subtract(hi_j, lo_j)) / 2
     center_gap = l2_norm(
         np.subtract(
             np.add(lo_i, hi_i) / 2,
             np.add(lo_j, hi_j) / 2))
     gap = np.subtract(np.subtract(center_gap, half_i), half_j)
     return np.array(np.maximum(gap, np.zeros(self.num_channel)))
Exemplo n.º 3
0
    def _grouping(self):
        """
        Proceed grouping phase to group clusters which need to be united.

        Loops until ``_condition_for_grouping`` reports no resonating pair.
        Each round merges cluster ``idx_l`` into cluster ``idx_s``, rewires
        the entries of ``self.group`` (a mapping from index pairs to
        synaptic strength) that referenced ``idx_l``, deletes row ``idx_l``
        from ``self.w`` and shifts every index above it down by one.
        """
        while True:
            # resonance: whether a mergeable pair was found this round;
            # idx_s / idx_l: surviving / to-be-removed cluster indices;
            # w_ij_1_list / w_ij_2_list: per-channel halves of the merged weight
            resonance, idx_s, idx_l, w_ij_1_list, w_ij_2_list = self._condition_for_grouping(
            )
            if not resonance:
                break

            # merge two clusters
            self.w[idx_s] = np.array([
                np.hstack((w_ij_1_list[ch], w_ij_2_list[ch]))
                for ch in range(self.num_channel)
            ])

            # reconnect nodes previously connected to "idx_l" to "idx_s"
            to_delete_group, to_add_group = [], []
            for check in self.group:
                if idx_l in check:
                    item1, item2 = check
                    # use a set so the idx_l -> idx_s swap collapses a
                    # would-be self-connection into a single element
                    reconnection = {item1, item2}
                    _, _ = reconnection.remove(idx_l), reconnection.add(idx_s)
                    reconnection = sorted(reconnection)
                    to_delete_group.append(
                        (item1, item2))  # self-connection considered
                    if len(reconnection) == 2:
                        # update the synaptic strength
                        item1, item2 = reconnection
                        T = 0
                        for ch in range(self.num_channel):
                            # per-channel difference of the two weights
                            subtraction = np.array(self.w[item1][ch] -
                                                   self.w[item2][ch])
                            # average of front/back halves = center-of-mass gap
                            center_of_mass_diff = (subtraction[:self.dim] +
                                                   subtraction[self.dim:]) / 2
                            T += np.exp(-self.alpha *
                                        l2_norm(center_of_mass_diff))
                        to_add_group.append([(item1, item2), T])

            # delete the collected items from group and update w and n_category
            self._update_groups(to_add_group, to_delete_group)
            self.w = np.delete(self.w, idx_l, axis=0)
            self.n_category -= 1

            # update indices > idx_l
            _, _ = to_delete_group.clear(), to_add_group.clear()

            for check in self.group:
                if any([c > idx_l for c in check]):
                    item1, item2 = check
                    # shift every index above the removed idx_l down by one
                    residuals = [-1 if item > idx_l else 0 for item in check]
                    item1, item2 = item1 + residuals[0], item2 + residuals[1]
                    to_add_group.append([(item1, item2), self.group[check]])
                    to_delete_group.append(check)

            self._update_groups(to_add_group, to_delete_group)
Exemplo n.º 4
0
    def _add_group(self, v_nodes, sample, condition):
        """
        Proceed necessary calculations for grouping.

        Computes the per-channel center of mass of every activated node in
        ``v_nodes`` (plus, when the sample spawned a new category, the raw
        sample itself) and records/updates the synaptic strength T between
        every pair of connected nodes in ``self.group``.
        """
        # Per-channel front/back halves of the activated nodes' weights.
        # The original computed these identically in both branches below;
        # hoisted so they are built exactly once.
        front = np.array([
            self._split_weight_nch(
                self.extract_append(self.w, v_nodes, ch), sample[ch])[0]
            for ch in range(self.num_channel)
        ])
        back = np.array([
            self._split_weight_nch(
                self.extract_append(self.w, v_nodes, ch), sample[ch])[1]
            for ch in range(self.num_channel)
        ])
        center_of_mass_list = (front + back) / 2

        to_connect = np.copy(v_nodes)
        if not all(condition):
            # A fresh category was just appended: connect it too, using the
            # raw sample as its center of mass in every channel.
            to_connect = np.hstack((to_connect, self.n_category - 1))
            sample_list = np.array([sample] * self.num_channel)
            center_of_mass_list = np.concatenate(
                (center_of_mass_list, sample_list), axis=1)

        for first in range(len(to_connect)):
            for second in range(first + 1, len(to_connect)):
                smaller, larger = sorted(
                    [to_connect[first], to_connect[second]])
                # new connections get added (first condition)
                # and synaptic strengths get updated (second condition)
                T = np.sum([
                    np.exp(-self.alpha *
                           l2_norm(center_of_mass_list[ch][first] -
                                   center_of_mass_list[ch][second]))
                    for ch in range(self.num_channel)
                ])

                if T != 0 or v_nodes[0] in (smaller, larger):
                    self.group[(smaller, larger)] = T
Exemplo n.º 5
0
    def _distance(self, sample_ch, weight_ch):
        """
        Return the distance between one channel of a sample and the
        matching channel of a weight.

        sample_ch: channel element of sample.
        weight_ch: channel element of weight.

        For multi-channel data this is called per channel, e.g.
        ``_distance(sample[0], weight[0])`` for the first channel of each.
        """
        front, back, expanded_lo, expanded_hi = self._split_weight_ch(
            weight_ch, sample_ch)
        # same left-to-right evaluation as the original expression
        return l2_norm(expanded_lo - front + expanded_hi - back)
Exemplo n.º 6
0
 def _rdrn_activation(self, sample):
     """
     Calculate the resonance of the sample to each weights in sDRN
     algorithm manner.

     Returns a 1-D array of length ``self.n_category`` where entry k is
     the channel-summed exponential activation of weight k, normalized
     by the global per-channel weight span.
     """
     # dist_glob depends only on the global weight self.wg, not on the
     # category -- hoisted out of the loop (the original recomputed it
     # once per category for no reason).
     dist_glob = np.array([
         l2_norm(
             np.subtract(self.wg[ch][self.dim:],
                         self.wg[ch][:self.dim]))
         for ch in range(self.num_channel)
     ])
     activation = [
         np.sum([
             np.exp(-self.alpha *
                    self._distance(sample[ch], self.w[category][ch]) /
                    dist_glob[ch]) for ch in range(self.num_channel)
         ]) for category in range(self.n_category)
     ]
     return np.array(activation)
Exemplo n.º 7
0
 def _learning_condition(self, sample, idx):
     """
     Calculate the flag to whether proceed grouping process or not.

     Returns the per-channel condition array together with the adaptive
     learning rate used for the update.
     """
     weight = self.w[idx]
     # learning rate grows with the cluster volume relative to the
     # (scaled) global volume, capped at self.lr
     volume_ratio = np.divide(
         self._volume_of_cluster(weight),
         self.dim * (1 - self.rho) * self._volume_of_cluster(self.wg))
     adaptive_lr = np.minimum(2 * volume_ratio, self.lr)
     if adaptive_lr == 0:
         adaptive_lr = 0.1  # fall back to a small fixed rate
     dist_glob = np.array([
         l2_norm(
             np.subtract(self.wg[ch][self.dim:], self.wg[ch][:self.dim]))
         for ch in range(self.num_channel)
     ])
     condition = self._distance_between_cluster_and_point(
         weight, sample) < self.dist * dist_glob
     return np.array(condition), np.array(adaptive_lr)
Exemplo n.º 8
0
    def _grouping(self, idx):
        """
        Proceed grouping phase to group clusters which need to be united.
        Conditions for grouping is calculated, and finally proceed grouping
        or not.
        """
        # Global per-channel weight span. BUG FIX: the original used
        # np.extract (condition-based element selection) here, while the
        # sibling dist_glob computations use np.subtract -- np.subtract is
        # the intended operation. Also hoisted: it does not depend on the
        # candidate cluster.
        dist_glob = np.array([
            l2_norm(
                np.subtract(self.wg[ch][self.dim:],
                            self.wg[ch][:self.dim]))
            for ch in range(self.num_channel)
        ])

        # find which cluster to group with idx-th cluster
        to_cluster, max_iov = None, 0
        for cluster in range(self.n_category):
            if cluster == idx:
                continue
            IoV, UoV = self._intersection_of_volume(self.w[cluster],
                                                    self.w[idx])

            if all(UoV < self.dim *
                   (1 - self.rho) * self._volume_of_cluster(self.wg)):
                distance = self._distance_between_clusters(
                    self.w[cluster], self.w[idx])
                iov_sum = np.sum(IoV)  # renamed: don't shadow builtin `sum`
                a = all(IoV > self.iov)
                b = iov_sum > max_iov
                c = all(distance < np.multiply(self.dist, dist_glob))
                d = all(IoV > self.iov / 2)
                temp_cluster = self._union_of_clusters(self.w[idx],
                                                       self.w[cluster])
                cluster_size_check = self._check_cluster_size_vig(temp_cluster)
                e = all(cluster_size_check)

                if (((a and b) or c) and d) and e:
                    to_cluster, max_iov = cluster, iov_sum

        # BUG FIX: `if to_cluster:` silently skipped the merge whenever the
        # chosen partner was cluster index 0 (falsy int).
        if to_cluster is not None:
            self.n_category -= 1
            self.w[idx] = self._union_of_clusters(self.w[idx],
                                                  self.w[to_cluster])
            self.w = np.delete(self.w, to_cluster, axis=0)