Example No. 1
 def predict(self, X_test):
     self.check_fitted()
     if X_test.ndim != 2:
         raise Exception("X_test should have 2 dimensions! X_dim:{}".format(
             X_test.ndim))
     X_test = np.float32(GPRNP.check_array(X_test))
     test_size = X_test.shape[0]
     arr_offset = 0
     length_scale = self.length_scale
     yhats = np.zeros([test_size, 1])
     sigmas = np.zeros([test_size, 1])
     eips = np.zeros([test_size, 1])
     while arr_offset < test_size:
         if arr_offset + self.batch_size_ > test_size:
             end_offset = test_size
         else:
             end_offset = arr_offset + self.batch_size_
         xt_ = X_test[arr_offset:end_offset]
         K2 = self.magnitude * np.exp(-ed(self.X_train, xt_) / length_scale)
         K3 = self.magnitude * np.exp(-ed(xt_, xt_) / length_scale)
         K2_trans = np.transpose(K2)
         yhat = np.matmul(K2_trans, np.matmul(self.K_inv, self.y_train))
         sigma = np.sqrt(np.diag(K3 - np.matmul(K2_trans, np.matmul(self.K_inv, K2)))) \
             .reshape(xt_.shape[0], 1)
         u = (self.y_best - yhat) / sigma
         phi1 = 0.5 * special.erf(u / np.sqrt(2.0)) + 0.5
         phi2 = (1.0 / np.sqrt(2.0 * np.pi)) * np.exp(np.square(u) * (-0.5))
         eip = sigma * (u * phi1 + phi2)
         yhats[arr_offset:end_offset] = yhat
         sigmas[arr_offset:end_offset] = sigma
         eips[arr_offset:end_offset] = eip
         arr_offset = end_offset
     GPRNP.check_output(yhats)
     GPRNP.check_output(sigmas)
     return GPRResult(yhats, sigmas)
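
Every example on this page calls a Euclidean-distance helper imported under the alias ed. In the GPR examples (No. 1, 2, 8, 9) it is used as a pairwise distance matrix between two sets of rows, while the krill-herd examples use it as a scalar distance between two vectors. A minimal NumPy stand-in for both uses, assuming ed behaves like sklearn's euclidean_distances respectively a plain vector norm (the exact import in each source repository may differ, and the helper names below are hypothetical):

import numpy as np

def pairwise_ed(A, B):
    # pairwise Euclidean distances, shape (len(A), len(B)); stand-in for the
    # matrix-valued use of ed in the GPR examples
    A = np.asarray(A, dtype=np.float64)
    B = np.asarray(B, dtype=np.float64)
    sq = np.sum(A ** 2, axis=1)[:, None] - 2.0 * A @ B.T + np.sum(B ** 2, axis=1)[None, :]
    return np.sqrt(np.clip(sq, 0.0, None))  # clip guards against tiny negative round-off

def scalar_ed(x, y):
    # distance between two vectors; stand-in for the scalar use of ed in the
    # krill-herd examples
    return float(np.linalg.norm(np.asarray(x, dtype=np.float64) - np.asarray(y, dtype=np.float64)))
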
Example No. 2
 def predict(self, X_test):
     self.check_fitted()
     if X_test.ndim != 2:
         raise Exception("X_test should have 2 dimensions! X_dim:{}"
                         .format(X_test.ndim))
     X_test = np.float32(GPRNP.check_array(X_test))
     test_size = X_test.shape[0]
     arr_offset = 0
     length_scale = self.length_scale
     yhats = np.zeros([test_size, 1])
     sigmas = np.zeros([test_size, 1])
     eips = np.zeros([test_size, 1])
     while arr_offset < test_size:
         if arr_offset + self.batch_size_ > test_size:
             end_offset = test_size
         else:
             end_offset = arr_offset + self.batch_size_
         xt_ = X_test[arr_offset:end_offset]
         K2 = self.magnitude * np.exp(-ed(self.X_train, xt_) / length_scale)
         K3 = self.magnitude * np.exp(-ed(xt_, xt_) / length_scale)
         K2_trans = np.transpose(K2)
         yhat = np.matmul(K2_trans, np.matmul(self.K_inv, self.y_train))
         sigma = np.sqrt(np.diag(K3 - np.matmul(K2_trans, np.matmul(self.K_inv, K2)))) \
             .reshape(xt_.shape[0], 1)
         u = (self.y_best - yhat) / sigma
         phi1 = 0.5 * special.erf(u / np.sqrt(2.0)) + 0.5
         phi2 = (1.0 / np.sqrt(2.0 * np.pi)) * np.exp(np.square(u) * (-0.5))
         eip = sigma * (u * phi1 + phi2)
         yhats[arr_offset:end_offset] = yhat
         sigmas[arr_offset:end_offset] = sigma
         eips[arr_offset:end_offset] = eip
         arr_offset = end_offset
     GPRNP.check_output(yhats)
     GPRNP.check_output(sigmas)
     return GPRResult(yhats, sigmas)
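
The inner loop of Examples No. 1 and 2 evaluates the Expected Improvement acquisition for a minimization problem: with u = (y_best - yhat) / sigma, EI = sigma * (u * Phi(u) + phi(u)), where Phi and phi are the standard normal CDF and PDF. A standalone sketch of just that step, on hypothetical toy inputs:

import numpy as np
from scipy import special

def expected_improvement(yhat, sigma, y_best):
    # EI for minimization: sigma * (u * Phi(u) + phi(u)), u = (y_best - yhat) / sigma
    u = (y_best - yhat) / sigma
    Phi = 0.5 * special.erf(u / np.sqrt(2.0)) + 0.5            # standard normal CDF
    phi = np.exp(-0.5 * np.square(u)) / np.sqrt(2.0 * np.pi)   # standard normal PDF
    return sigma * (u * Phi + phi)

# hypothetical predictions, their uncertainties, and the best observed objective value
print(expected_improvement(np.array([0.2, 0.5]), np.array([0.1, 0.3]), y_best=0.3))
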
Example No. 3
    def sens_range(self, i, kh_pos):
        r"""Get the sensing distance for ith krill individual.

        :param i: Index of ith krill individual
        :param kh_pos: The position of krill herd
        :return: The sensing distance for ith krill individual
        """
        return sum([ed(kh_pos[i], kh_pos[j]) for j in range(self.population)]) / (self.nn * self.population)
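
Example No. 3 (and the later variants in Examples No. 5 and 10) computes the krill sensing distance: the sum of distances from krill i to every member of the herd, divided by nn times the population size. A vectorized NumPy equivalent (positions is the (N, D) population array; nn mirrors self.nn in the originals, commonly 5 in the krill-herd literature):

import numpy as np

def sense_range(i, positions, nn=5):
    # sensing distance of krill i: sum of distances to the whole herd,
    # divided by nn times the population size
    dists = np.linalg.norm(positions - positions[i], axis=1)
    return dists.sum() / (nn * len(positions))
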
Example No. 4
	def funX(self, x, y):
		r"""Get x values.

		Args:
			x (numpy.ndarray): First krill/individual.
			y (numpy.ndarray): Second krill/individual.

		Returns:
			numpy.ndarray: Normalized difference between the two krill (approximately the unit vector pointing from x toward y).
		"""
		return ((y - x) + self.epsilon) / (ed(y, x) + self.epsilon)
Example No. 5
	def sensRange(self, ki, KH):
		r"""Calculate sense range for selected individual.

		Args:
			ki (int): Selected individual.
			KH (numpy.ndarray): Krill herd population.

		Returns:
			float: Sense range for krill.
		"""
		return sum([ed(KH[ki], KH[i]) for i in range(self.NP)]) / (self.nn * self.NP)
Example No. 6
    def get_neighbors(self, i, ids, kh_pos):
        r"""Get neighbors of ith krill individual.

        :param i: Index of ith krill individual
        :param ids: Sensing distance of ith krill individual
        :param kh_pos: The position of krill herd
        :return: {list} - Neighbors of ith krill individual.
        """
        neighbors = list()
        n = 0
        for j in range(self.population):
            if n < self.nn and j != i and ed(kh_pos[i], kh_pos[j]) < ids:
                n += 1
                neighbors.append(j)
        return neighbors
Example No. 7
	def getNeighbours(self, i, ids, KH):
		r"""Get neighbours.

		Args:
			i (int): Individual looking for neighbours.
			ids (float): Maximal distance for being a neighbour.
			KH (numpy.ndarray): Current population.

		Returns:
			numpy.ndarray: Neighbours of the krill herd.
		"""
		N = list()
		for j in range(self.NP):
			if j != i and ids > ed(KH[i], KH[j]): N.append(j)
		if not N: N.append(self.randint(self.NP))
		return asarray(N)
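
Examples No. 6 and 7 (and the later Example No. 12) select neighbours that lie within the sensing distance; Example No. 6 additionally caps the count at self.nn, and Example No. 7 falls back to one random individual when nobody qualifies. A vectorized sketch of the basic within-radius selection (plain NumPy; the cap and the random fallback are left out):

import numpy as np

def neighbors_within(i, positions, sense_dist):
    # indices j != i whose distance to krill i is below the sensing distance
    dists = np.linalg.norm(positions - positions[i], axis=1)
    mask = dists < sense_dist
    mask[i] = False  # a krill is not its own neighbour
    return np.nonzero(mask)[0]
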
Example No. 8
 def fit(self, X_train, y_train, ridge=0.01):
     self._reset()
     X_train, y_train = self.check_X_y(X_train, y_train)
     if X_train.ndim != 2 or y_train.ndim != 2:
         raise Exception("X_train or y_train should have 2 dimensions! X_dim:{}, y_dim:{}"
                         .format(X_train.ndim, y_train.ndim))
     self.X_train = np.float32(X_train)
     self.y_train = np.float32(y_train)
     sample_size = self.X_train.shape[0]
     if np.isscalar(ridge):
         ridge = np.ones(sample_size) * ridge
     assert isinstance(ridge, np.ndarray)
     assert ridge.ndim == 1
     K = self.magnitude * np.exp(-ed(self.X_train, self.X_train) / self.length_scale) \
         + np.diag(ridge)
     K_inv = np.linalg.inv(K)
     self.K = K
     self.K_inv = K_inv
     self.y_best = np.min(y_train)
     return self
Example No. 9
 def fit(self, X_train, y_train, ridge=0.01):
     self._reset()
     X_train, y_train = self.check_X_y(X_train, y_train)
     if X_train.ndim != 2 or y_train.ndim != 2:
         raise Exception(
             "X_train or y_train should have 2 dimensions! X_dim:{}, y_dim:{}"
             .format(X_train.ndim, y_train.ndim))
     self.X_train = np.float32(X_train)
     self.y_train = np.float32(y_train)
     sample_size = self.X_train.shape[0]
     if np.isscalar(ridge):
         ridge = np.ones(sample_size) * ridge
     assert isinstance(ridge, np.ndarray)
     assert ridge.ndim == 1
     K = self.magnitude * np.exp(-ed(self.X_train, self.X_train) / self.length_scale) \
         + np.diag(ridge)
     K_inv = np.linalg.inv(K)
     self.K = K
     self.K_inv = K_inv
     self.y_best = np.min(y_train)
     return self
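
Examples No. 8 and 9 assemble the kernel K = magnitude * exp(-D / length_scale) + diag(ridge) and invert it with np.linalg.inv. Below is a sketch of the same kernel built with plain NumPy but solved through a Cholesky factorization instead of an explicit inverse, which is the numerically safer way to apply K^{-1} to y; the helper name and default hyperparameters are hypothetical:

import numpy as np

def fit_kernel(X_train, y_train, magnitude=1.0, length_scale=1.0, ridge=0.01):
    # pairwise Euclidean distances between training rows
    D = np.linalg.norm(X_train[:, None, :] - X_train[None, :, :], axis=-1)
    K = magnitude * np.exp(-D / length_scale) + ridge * np.eye(len(X_train))
    L = np.linalg.cholesky(K)                                    # K = L @ L.T
    alpha = np.linalg.solve(L.T, np.linalg.solve(L, y_train))    # alpha = K^{-1} y
    return K, L, alpha
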
Example No. 10
 def sensRange(self, ki, KH):
     return sum([ed(KH[ki], KH[i])
                 for i in range(self.N)]) / (self.nn * self.N)
Example No. 11
 def funX(self, x, y):
     return ((y - x) + self.epsilon) / (ed(y, x) + self.epsilon)
Example No. 12
 def getNeigbors(self, i, ids, KH):
     N = list()
     for j in range(self.N):
         if j != i and ids > ed(KH[i], KH[j]): N.append(j)
     return N
Example No. 13
 def color_calculation(self, one_pos):
     dis_min = inf
     for ref_point in self.ref_points:
         dis = ed(ref_point, one_pos)
         dis_min = dis if dis < dis_min else dis_min
     # NOTE: the original returned 'g' on both branches; 'r' here is an assumed
     # fallback color for positions farther than the threshold.
     return 'g' if dis_min < self.threshold else 'r'
Example No. 14
	def funX(self, x, y): return ((y - x) + self.epsilon) / (ed(y, x) + self.epsilon)

	def funK(self, x, y, b, w): return ((x - y) + self.epsilon) / ((w - b) + self.epsilon)
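
In Example No. 14, funX is an epsilon-regularized unit vector pointing from krill x towards krill y, and funK normalizes a fitness difference by the spread between two reference fitness values (b and w, presumably the best and worst in the herd). A small standalone check; eps and the best/worst interpretation are assumptions:

import numpy as np

eps = 1e-31  # hypothetical stand-in for self.epsilon

def fun_x(x, y):
    # epsilon-regularized unit vector pointing from x towards y
    return ((y - x) + eps) / (np.linalg.norm(y - x) + eps)

def fun_k(fx, fy, f_best, f_worst):
    # fitness difference normalized by the best-to-worst spread
    return ((fx - fy) + eps) / ((f_worst - f_best) + eps)

v = fun_x(np.array([0.0, 0.0]), np.array([3.0, 4.0]))
print(v, np.linalg.norm(v))  # roughly [0.6, 0.8], with norm close to 1
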
Example No. 15
def SFc_means(attr_vector,
              k,
              seeds,
              seeds_classes,
              centroids,
              threshold,
              z,
              total_grouping=0):
    """
    A function, k-means like, that generate k groups in the data entry based
     on pre-estabilished centroids and a precision threshold.

    Args:
        attr_vector: atribute vector of each element (ndarray [MxN]; M is the number of attributes and N the number of elements);
        k: quantity of groups (int);
        seeds: rodulated elements used as first centroids (ndarray [MxZ], M is the number of attributes and Z the number of seeds);
        seeds_classes: array with atributes vector classes; (ndarray [N]; N is the number of elements)
        centroids: position of seeds (ndarray; [Z], Z is the number of seeds, each element represents a position at attr_vector);
        threshold: percetage used to calculate the rotulation precision;
        z: nebulosity level;
        total_grouping: used to set the grouping type (int; 0 to parcial grouping (based on threshold) and 1 to total grouping);
    
    Returns:
        An [N] array of labels, that N is the number of entries.
    """
    def verify_equality(vector, threshold):
        """
        Verify whether the first threshold elements of vector have the same value.

        Args:
            vector: a list with numbers (list);
            threshold: the number of vector elements to verify (int).
        
        Returns:
            True if the first threshold elements of vector have the same value, False otherwise.
        """
        actual_value = vector[0]
        for i in range(1, threshold):
            if vector[i] != actual_value:
                return False
            actual_value = vector[i]
        return True

    # build the array of unique rows (to avoid repeated data) and, for each original element, its position in that unique array
    histogram, positions_histogram = np.unique(attr_vector,
                                               axis=0,
                                               return_inverse=True)

    # create a zeros array to store the future classes (one for each unique element)
    labels = np.zeros((len(histogram)))

    hist_centroids = np.zeros(np.unique(seeds, axis=0).shape[0])
    hist_seeds = np.zeros(np.unique(seeds, axis=0).shape)
    hist_seeds_classes = np.zeros(np.unique(seeds, axis=0).shape[0])
    counter = 0

    for i in range(len(centroids)):
        histogram_position = positions_histogram[centroids[i]]
        labels[histogram_position] = seeds_classes[i]
        # record this centroid only if its histogram position is not stored yet
        if len(np.nonzero(hist_centroids[:counter] == histogram_position)[0]) == 0:
            hist_centroids[counter] = histogram_position
            hist_seeds[counter, :] = seeds[i, :]
            hist_seeds_classes[counter] = seeds_classes[i]
            counter += 1
    expoent = 1 / (z - 1)

    # for each unique element, but only those that are not already-labeled centroids
    for i in range(len(labels)):
        if len(np.nonzero(hist_centroids == i)[0]) == 0:
            pertinency_levels = []
            distances = []

            #calculate the euclidean distances between the element and all the centroids
            for j in range(len(hist_centroids)):
                distances.append(ed(histogram[i], hist_seeds[j]))

            #calculate the pertinency level of the element to every centroid
            for j in range(len(hist_centroids)):
                mu = 0

                for k in range(len(hist_centroids)):
                    mu += (distances[j] / distances[k])**expoent

                mu = mu**-1

                pertinency_levels.append([mu, hist_seeds_classes[j]])

            #sorting pertinency_levels list in descending order
            pertinency_levels.sort(key=lambda x: x[0], reverse=True)

            pertinency_levels = np.asarray(pertinency_levels)

            if total_grouping == 0:
                # calculate the absolute threshold from the percentage threshold and the number of provided centroids
                absolute_threshold = mt.floor(len(centroids) * threshold)

                # verify whether the first absolute_threshold items belong to the same group
                if verify_equality(pertinency_levels[:, 1],
                                   absolute_threshold):
                    # if so, the unlabeled element takes the label with the highest pertinency level
                    labels[i] = pertinency_levels[0][1]
            else:
                labels[i] = pertinency_levels[0][1]

    final_labels = np.zeros(len(attr_vector))
    for i in range(len(final_labels)):
        final_labels[i] = labels[positions_histogram[i]]

    return final_labels
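
A minimal usage sketch for SFc_means on hypothetical toy data (six 2-D points, two of them hand-labeled and used as seeds, total grouping enabled); it assumes the function and its module-level imports (np, mt, ed) are in scope:

import numpy as np

# hypothetical toy data: two tight clusters around (0, 0) and (5, 5)
attr_vector = np.array([[0.0, 0.0], [0.1, 0.0], [0.0, 0.1],
                        [5.0, 5.0], [5.1, 5.0], [5.0, 5.1]])
centroids = np.array([0, 3])        # indices of the seed rows in attr_vector
seeds = attr_vector[centroids]      # the seed rows themselves
seeds_classes = np.array([1, 2])    # one class label per seed

labels = SFc_means(attr_vector, k=2, seeds=seeds, seeds_classes=seeds_classes,
                   centroids=centroids, threshold=0.5, z=2.0, total_grouping=1)
print(labels)  # expected: the first three points take class 1, the last three class 2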