Example #1
def ParamCooccurence(HistomatCoo):
    # Compute the texture descriptors (energy, contrast, dissimilarity, homogeneity, correlation)
    energie = greycoprops(HistomatCoo, 'energy')
    contraste = greycoprops(HistomatCoo, 'contrast')
    dissimilarite = greycoprops(HistomatCoo, 'dissimilarity')
    homogeneite = greycoprops(HistomatCoo, 'homogeneity')
    correlation = greycoprops(HistomatCoo, 'correlation')
    return energie, contraste, dissimilarite, homogeneite, correlation
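A minimal usage sketch for ParamCooccurence above; the input image and the greycomatrix parameters here are illustrative assumptions, not part of the original example:

import numpy as np
from skimage.feature import greycomatrix, greycoprops

# Hypothetical 8-bit grayscale input; any uint8 image with values in [0, 255] works here.
img = np.random.randint(0, 256, (64, 64), dtype=np.uint8)
HistomatCoo = greycomatrix(img, distances=[1], angles=[0], levels=256,
                           symmetric=True, normed=True)
energie, contraste, dissimilarite, homogeneite, correlation = ParamCooccurence(HistomatCoo)
print(energie.shape)  # (n_distances, n_angles) -> (1, 1)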
Example #2
    def compute(self, image):
        """
        Compute the GLCM features.
        """

        assert (image.ndim == 2)
        h, w = image.shape  # numpy images are (rows, cols) = (height, width)

        nw = int(w / self.wsize_)
        nh = int(h / self.wsize_)

        nf = len(self.which_feats_)

        ft = np.zeros((nf, nw * nh))  # features will be on rows
        k = 0
        for x in np.arange(0, nw):
            for y in np.arange(0, nh):
                x0, y0 = x * self.wsize_, y * self.wsize_
                x1, y1 = x0 + self.wsize_, y0 + self.wsize_

                glcm = greycomatrix(image[y0:y1, x0:x1], self.dist_,
                                    self.theta_, self.levels_, self.symmetric_,
                                    self.normed_)
                ft[:, k] = np.array(
                    [greycoprops(glcm, f)[0, 0] for f in self.which_feats_])
                k += 1

        res = {}
        k = 0
        for f in self.which_feats_:
            res[f] = ft[k, :]
            k += 1

        return res
Example #4
    def GetPropsFromMatrix(self):
        mtrx = self.GetMatrix()
        props = []
        # Flatten each (n_distances, n_angles) greycoprops array into a single feature list.
        for prop_name in ('contrast', 'correlation', 'energy', 'homogeneity'):
            for row in ft.greycoprops(mtrx, prop=prop_name):
                props.extend(row)
        return props
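The loop above collects one value per (distance, angle) pair for each property; a small sketch of the shapes involved, using an illustrative image and matrix rather than self.GetMatrix():

import numpy as np
from skimage.feature import greycomatrix, greycoprops

img = np.random.randint(0, 256, (32, 32), dtype=np.uint8)  # illustrative input image
mtrx = greycomatrix(img, distances=[1, 2], angles=[0, np.pi / 2], levels=256)
print(greycoprops(mtrx, 'contrast').shape)  # (2, 2): rows are distances, columns are angles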
Example #5
File: model.py  Project: guancodes/palmtree
def _glcm_measures(X):
    measures = []
    glcm = greycomatrix(X, [1], [0], levels=8, normed=True)
    for p in ('contrast', 'dissimilarity', 'homogeneity', 'energy',
              'correlation', 'ASM'):
        res = greycoprops(glcm, p)
        measures.extend(list(res.reshape(res.size)))
    return measures
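Because the matrix is built with levels=8, X must already contain integer values in [0, 7]. A hedged quantization sketch; the input array and the bin width of 32 are illustrative assumptions, not taken from the palmtree project:

import numpy as np

# Reduce an 8-bit image (values 0-255) to the 8 grey levels expected by _glcm_measures.
image_u8 = np.random.randint(0, 256, (64, 64), dtype=np.uint8)
X = (image_u8 // 32).astype(np.uint8)   # values now in [0, 7]
measures = _glcm_measures(X)
print(len(measures))  # 6 properties x 1 distance x 1 angle = 6 values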
Example #6
def feature_build(img):
    from skimage.feature.texture import greycoprops, greycomatrix, local_binary_pattern
    from skimage.color import rgb2gray
    # rgb2gray returns floats in [0, 1]; rescale to the 4 grey levels used below before
    # casting, otherwise the uint8 cast would collapse nearly every pixel to 0.
    img = np.asarray(rgb2gray(img.numpy()) * 3, dtype=np.uint8)
    mat = greycomatrix(img, [1, 2], [0, np.pi/2], levels=4, normed=True, symmetric=True)
    features = []
    if (True):
        features.append(greycoprops(mat, 'contrast'))
        features.append(greycoprops(mat, 'dissimilarity'))
        features.append(greycoprops(mat, 'homogeneity'))
        #features.append(greycoprops(mat, 'energy'))
        #features.append(greycoprops(mat, 'correlation'))
        features = np.concatenate(features)
    else:
        radius = 2
        features = local_binary_pattern(img, 8*radius, radius, method='default')  # 'ror', 'uniform', 'var'
    feature = features.flatten()
    return torch.tensor(feature).float()
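A brief usage sketch for feature_build; the RGB tensor is illustrative, and the project's module-level numpy/torch imports are assumed:

import torch

# Illustrative (H, W, 3) RGB image stored as a torch tensor.
rgb = torch.randint(0, 256, (32, 32, 3), dtype=torch.uint8)
vec = feature_build(rgb)
print(vec.shape)  # 3 properties x 2 distances x 2 angles = 12 values -> torch.Size([12])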
Example #7
def getTextureMetrics(glcm, config):

    values = []

    # compute glcm texture metrics
    for metric in config['metrics']:
        values = np.concatenate((values, greycoprops(glcm, metric).flatten()))

    return values
Example #8
def get_features(image):
    glcm = greycomatrix(image,
                        distances=[5],
                        angles=[0],
                        levels=256,
                        symmetric=True,
                        normed=True)

    contrast = np.array(greycoprops(glcm, 'contrast'))
    dissimilarity = np.array(greycoprops(glcm, 'dissimilarity'))
    homogeneity = np.array(greycoprops(glcm, 'homogeneity'))
    energy = np.array(greycoprops(glcm, 'energy'))
    correlation = np.array(greycoprops(glcm, 'correlation'))
    ASM = np.array(greycoprops(glcm, 'ASM'))
    listFeatures = [
        contrast, dissimilarity, homogeneity, energy, correlation, ASM
    ]
    return listFeatures
Example #9
    def compute_feature_vector(self, patch):
        #GLCMmat = self.GLCM(patch,self.angle,self.distance,sym = True,norm = True)
        #patch = patch.astype(np.uint8)
        range_patch = patch.max() - patch.min()
        shape = patch.shape
        if (range_patch == 0):
            range_patch = range_patch + 1
        patch_scaled = sklearn.preprocessing.minmax_scale(
            patch.ravel(), feature_range=(0, range_patch)).reshape(shape)
        patch_scaled = patch_scaled.astype('uint8')
        M = greycomatrix(image=patch_scaled,
                         distances=[self.distance],
                         angles=[self.angle],
                         levels=range_patch + 1,
                         symmetric=True,
                         normed=True)
        GLCM = np.squeeze(M, axis=2)
        GLCM = np.squeeze(GLCM, axis=2)

        self.feature_vector[0] = self.ASM(GLCM)
        self.feature_vector[1] = self.contrast(GLCM)
        self.feature_vector[2] = self.dissimilarity(GLCM)
        self.feature_vector[3] = self.homogeneity(GLCM)
        self.feature_vector[4] = self.energy(GLCM)
        self.feature_vector[5] = self.entropy(GLCM)
        self.feature_vector[6] = self.svar(GLCM)
        self.feature_vector[7] = greycoprops(M, prop='correlation')[0, 0]  # single distance/angle -> scalar
        self.feature_vector[8] = self.sum_avg(GLCM)
        self.feature_vector[9] = self.sum_entropy(GLCM)
        self.feature_vector[10] = self.dif_entropy(GLCM)
        self.feature_vector[11] = self.clustershade(GLCM)
        self.feature_vector[12] = self.clusterprom(GLCM)

        print(self.i)
        self.i = self.i + 1
        return self.feature_vector
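A side note on the two squeeze calls above: greycomatrix always returns a 4-D array, so with a single distance and angle the last two axes have size 1. A minimal illustration on a synthetic patch:

import numpy as np
from skimage.feature import greycomatrix

patch = np.random.randint(0, 8, (16, 16), dtype=np.uint8)  # synthetic patch with 8 grey levels
M = greycomatrix(patch, distances=[1], angles=[0], levels=8, symmetric=True, normed=True)
print(M.shape)              # (8, 8, 1, 1)
print(np.squeeze(M).shape)  # (8, 8), equivalent to the two axis-2 squeezes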
Example #10
def Maskgenerator(generatorfile_image,
                  generatorfile_GT,
                  Imgoverlay=True,
                  GToverlay=False):
    real_image_stack = generatorfile_image[0]
    real_GT_stack = generatorfile_GT[0]
    for i in range(0, file_amount):
        real_image = real_image_stack[i, :, :, :]
        image = real_image[:, :, 0]
        normal_image = real_image[:, :, 0]
        # plt.imshow(image, interpolation='none', cmap='gray')

        fig = plt.figure(figsize=(10, 8), dpi=300)
        ax1 = fig.add_subplot(1, 2, 1)
        ax1.set_xlim([0, 512])
        ax1.set_ylim([512, 0])
        ax2 = fig.add_subplot(1, 2, 2)

        if Imgoverlay == True:
            ax1.imshow(image, interpolation='none', cmap='gray')

        predictions = model.predict(real_image_stack)
        two = predictions[i, :, :, 0]
        two = np.where(two > 0.4, 1, 0)
        two = two.astype(np.uint8)
        # plt.imshow(two)
        # plt.savefig("Predict_Only"+str(i)+".png")

        GapImage = GapFill(two, i)

        RemovedImage = RemoveSmall(GapImage, i)
        # labeled_mask = measure.label(RemovedImage, connectivity=2)
        # ilm_mask = (labeled_mask==1)
        #
        # Pre_seperated_mask = (labeled_mask==2).astype(np.uint8)
        # Pre_seperated_mask = Pre_seperated_mask.astype(np.float32)

        empty_array, path_top, path_bottom_dist = Costfunction(
            image=RemovedImage)
        drusen_array, gradient_drusen, avarage_array, image_drusen, histogram_plot = HistoDrusen(
            top_oned_array=path_top,
            bottom_oned_array=path_bottom_dist,
            imagearray=normal_image,
            i=i)

        ax2.set_xlim([1, 256])
        ax2.set_ylim([0, 500])
        ax2.plot(histogram_plot)

        x = histogram_plot[:, 0]
        print(x)
        peaks, properties = find_peaks(x, prominence=1, width=3)
        ax2.plot(peaks, x[peaks], "x")
        ax2.vlines(x=peaks,
                   ymin=x[peaks] - properties["prominences"],
                   ymax=x[peaks],
                   color="C1")
        ax2.hlines(y=properties["width_heights"],
                   xmin=properties["left_ips"],
                   xmax=properties["right_ips"],
                   color="C1")

        # drusenfinder = DrusenFinder(top_oned_array=top_oned_array, bottom_oned_array=bottom_oned_array, drusenarray=)
        # drusenfinder = np.ma.masked_where(drusenfinder == 0, drusenfinder)
        # drusenarray, gradient_drusen, avarage_array = HistoDrusen(top_oned_array=top_oned_array, bottom_oned_array=bottom_oned_array, imagearray=image, i=i)

        # ILM_mask = (labeled_mask==1).astype(np.uint8)
        # ILM_mask = np.ma.masked_where(ILM_mask == 0, ILM_mask)
        # OBM_mask = OBM_RPE_seperated_mask[:,:,0]
        # OBM_mask = np.ma.masked_where(OBM_mask == 0, OBM_mask)
        #
        # #
        # RPE_mask = OBM_RPE_seperated_mask[:,:,1]
        # RPE_mask = np.ma.masked_where(RPE_mask == 0, RPE_mask)

        if GToverlay == True:
            GT = real_GT_stack[i, :, :, 0]
            GT = GT.astype(np.uint8)
            data_mask = np.ma.masked_where(GT == 0, GT)
            plt.imshow(data_mask,
                       interpolation='none',
                       cmap='brg',
                       alpha=0.5,
                       vmin=0)

        total_drusen = 0
        for m in range(0, len(avarage_array)):
            col_avarage = avarage_array[m]
            if col_avarage >= 0:
                total_drusen = total_drusen + col_avarage

        mean = np.zeros((file_amount), dtype=np.float32)
        std = np.zeros((file_amount), dtype=np.float32)
        var = np.zeros((file_amount), dtype=np.float32)
        contrast = np.zeros((file_amount), dtype=np.float32)
        homogeneity = np.zeros((file_amount), dtype=np.float32)
        energy = np.zeros((file_amount), dtype=np.float32)

        mean[i] = np.mean(histogram_plot, dtype=np.float32)
        std[i] = np.std(histogram_plot, dtype=np.float32)
        var[i] = np.var(histogram_plot, dtype=np.float32)
        drusen_array = np.ma.masked_where(drusen_array == 0, drusen_array)

        distances = [1, 5, 10, 15, 20]
        angles = [0, np.pi / 4, np.pi / 2, 3 * np.pi / 4]
        properties = ["contrast", "homogeneity", "energy"]

        glcm = greycomatrix(drusen_array,
                            distances=distances,
                            angles=angles,
                            levels=256,
                            symmetric=True,
                            normed=True)
        contrast = greycoprops(glcm, prop="contrast")
        homogeneity = greycoprops(glcm, prop="homogeneity")
        energy = greycoprops(glcm, prop="energy")
        correlation = greycoprops(glcm, prop="correlation")
        dissimilarity = greycoprops(glcm, prop="dissimilarity")
        '''
        Average Drusen Height Calc
        '''
        avarage_rpeheight = np.average(avarage_array)

        #print(texture)
        #print(histogram_plot)

        # # topvalue = np.amax(avarage_array)
        #
        empty_array = np.ma.masked_where(empty_array == 0, empty_array)
        ax1.imshow(empty_array, interpolation='none', alpha=0.8, cmap="brg")
        # ilm_mask = np.ma.masked_where(ilm_mask == 0, ilm_mask)
        # plt.imshow(ilm_mask, interpolation='none', alpha=0.8, cmap="brg")
        ax1.imshow(gradient_drusen,
                   interpolation='none',
                   alpha=0.5,
                   vmin=8,
                   vmax=28,
                   cmap="RdYlGn_r")
        # plt.imshow(OBM_mask, interpolation='none', alpha=0.8, cmap='gist_rainbow', vmax=1)
        # plt.imshow(RPE_mask, interpolation='none', alpha=0.8, cmap="rainbow", vmax=1)

        ax1.text(10.0,
                 600.0,
                 "DrusenPixs: " + str(total_drusen),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        ax1.text(10.0,
                 630.0,
                 "mean: " + str(mean[i]),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        ax1.text(10.0,
                 660.0,
                 "STD: " + str(std[i]),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        ax1.text(10.0,
                 690.0,
                 "Var: " + str(var[i]),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        ax1.text(10.0,
                 720.0,
                 "RPE-OBM " + str(avarage_rpeheight),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)

        ax1.text(180.0,
                 600.0,
                 "homogeneity " + str(np.average(homogeneity)),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        ax1.text(180.0,
                 630.0,
                 "energy: " + str(np.average(energy)),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        ax1.text(180.0,
                 660.0,
                 "contrast: " + str(np.average(contrast)),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        ax1.text(180.0,
                 690.0,
                 "correlation: " + str(np.average(correlation)),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        ax1.text(180.0,
                 720.0,
                 "dissimilarity: " + str(np.average(dissimilarity)),
                 verticalalignment='bottom',
                 horizontalalignment='left',
                 color='white',
                 fontsize=8)
        plt.savefig("GDL_CHECKKING_" + str(i) + "_Final-image.png",
                    bbox_inches='tight',
                    pad_inches=0)
        plt.close()
                print("%.1f percent" % (float(i_row) / float(rows) * 100),
                      end='\r')
            for jj, i_col in enumerate(np.arange(0, cols, stride_x[rr])):
                # clip the raster subset for further calculations
                subset = layer.values[i_row:i_row + stride_y[rr],
                                      i_col:i_col + stride_x[rr]]
                if np.isnan(subset).sum() == 0:
                    mean.values[ii, jj] = np.mean(subset)
                    variance.values[ii, jj] = np.var(subset)

                    subset_scaled = 255 * (subset - layer_min) / (layer_max -
                                                                  layer_min)
                    glcm = tex.greycomatrix(subset_scaled.astype('int'), [1],
                                            [0, pi / 4, pi / 2, pi * 3 / 4],
                                            levels=256)
                    contrast.values[ii, jj] = tex.greycoprops(
                        glcm, 'contrast')[0].mean()
                    dissimilarity.values[ii, jj] = tex.greycoprops(
                        glcm, 'dissimilarity')[0].mean()
                    homogeneity.values[ii, jj] = tex.greycoprops(
                        glcm, 'homogeneity')[0].mean()
                    correlation.values[ii, jj] = tex.greycoprops(
                        glcm, 'correlation')[0].mean()
                    asm.values[ii, jj] = tex.greycoprops(glcm, 'ASM')[0].mean()

        # write array to new geotiff
        prefix = sfile.split('/')[-1][:-8]
        res_str = str(resolution).zfill(3)
        io.write_xarray_to_GeoTiff(
            mean, '%s/alos_%sm/%s_%sm_mean.tif' %
            (path2textures, res_str, prefix, res_str))
        io.write_xarray_to_GeoTiff(
Example #12
def get_GLCM_features(image,
                      distances=(1,),
                      angles=None,
                      levels=256,
                      symmetric=True,
                      normed=True,
                      features=None):
    """
    Extract features from the grey level co-occurrence matrix (GLCM) of an image.

    :param image: OpenCV numpy array_like of uint8
    :param distances: array_like
        List of pixel pair distance offsets.
    :param angles: array_like
        List of pixel pair angles in radians.
    :param levels: int, optional
        The input image should contain integers in [0, levels-1], where levels
        is the number of grey levels counted (typically 256 for an 8-bit image).
        Default: 256.
    :param symmetric: bool, optional
        If True, the output matrix P[:, :, d, theta] is symmetric. This is
        accomplished by ignoring the order of value pairs, so both (i, j) and
        (j, i) are accumulated when (i, j) is encountered for a given offset.
        Default: True.
    :param normed: bool, optional
        If True, normalize each matrix P[:, :, d, theta] by dividing by the
        total number of accumulated co-occurrences for the given offset. The
        elements of the resulting matrix sum to 1. Default: True.
    :param features: array_like
        The list of desired features to extract from the GLCM. Accepted values:
        "energy", "contrast", "homogeneity", "ASM", "dissimilarity",
        "correlation", "entropy".
    :return: dictionary mapping each requested feature name to its values.
        Features included: energy, contrast, homogeneity, entropy, ASM,
        dissimilarity, correlation.
    """

    if angles is None:
        angles = [0, np.pi / 4, 2 * np.pi / 4, 3 * np.pi / 4]

    if features is None:
        features = [
            "energy", "contrast", "homogeneity", "ASM", "dissimilarity",
            "correlation", "entropy"
        ]
    else:
        accepted_features = [
            "energy", "contrast", "homogeneity", "ASM", "dissimilarity",
            "correlation", "entropy"
        ]
        for f in features:
            if f not in accepted_features:
                raise Exception("Feature " + f +
                                " is not accepted in the set of features")

    image_glcm = sktex.greycomatrix(image,
                                    distances,
                                    angles,
                                    levels=levels,
                                    symmetric=symmetric,
                                    normed=normed)

    output_features = dict()
    for feature in features:
        if feature == "entropy":
            entropy = np.zeros((len(distances), len(angles)))  # one value per (distance, angle) pair
            for i in range(image_glcm.shape[0]):
                for j in range(image_glcm.shape[1]):
                    entropy -= image_glcm[i, j] * np.ma.log(image_glcm[i, j])
            output_features[feature] = entropy
        else:
            output_features[feature] = sktex.greycoprops(image_glcm, feature)

    return output_features
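A short usage sketch for get_GLCM_features; the image is illustrative and the example's module-level numpy and sktex imports are assumed:

import numpy as np

image = np.random.randint(0, 256, (128, 128), dtype=np.uint8)  # illustrative 8-bit image
feats = get_GLCM_features(image, distances=(1,), features=["contrast", "entropy"])
print(feats["contrast"].shape)  # (n_distances, n_angles) -> (1, 4) with the default angles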
Example #13
#cv2.waitKey()
cv2.imwrite(r'..\gray.png', gray)  # raw string avoids the invalid '\g' escape sequence
gray = np.array(gray, dtype=np.uint8)
#print(np.shape(gray))
#print(type(gray))
#glcm = greycomatrix(gray, [1], [0, np.pi/4, np.pi/2, 3*np.pi/4], levels=256)
#glcm = greycomatrix(gray, [1], [0], levels=256, normed=True, symmetric=True)
#glcm = greycomatrix(gray, [1], [0, 3*np.pi/4, np.pi/2, np.pi/4], levels=256)
d = 1
glcm = greycomatrix(gray, [d], [0, 3 * np.pi / 4, np.pi / 2, np.pi / 4],
                    levels=256)
print(np.shape(glcm))

print(glcm[0:7, 0:7, 0, 0])

diss = greycoprops(glcm, 'dissimilarity')
contrast = greycoprops(glcm, 'contrast')
correlation = greycoprops(glcm, 'correlation')
homogeneity = greycoprops(glcm, 'homogeneity')
ASM = greycoprops(glcm, 'ASM')
print(homogeneity[0, 0], ' ', homogeneity[0, 1], ' ', homogeneity[0, 2], ' ',
      homogeneity[0, 3])


def calc_glcm(gray, index_baris, index_kolom, x_offset, y_offset):
    # index_baris / index_kolom: row / column indices to visit (Indonesian: baris = row, kolom = column)
    x, y = gray.shape
    temp_glcm = np.zeros((256, 256))
    for i in index_baris:
        for j in index_kolom:
            current = gray[i, j]
            neighbor = gray[i + x_offset, j + y_offset]
            temp_glcm[current, neighbor] += 1  # accumulate the co-occurrence count for this pixel pair
    return temp_glcm
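For comparison, the manual accumulation in calc_glcm should reproduce skimage's unnormalised, non-symmetric counts for the same offset. A hedged sanity-check sketch, reusing gray and the imports already present in this example:

# Sketch only, not from the original script: a one-pixel offset to the right corresponds
# to distance 1, angle 0 in greycomatrix.
rows, cols = gray.shape
manual = calc_glcm(gray, range(rows), range(cols - 1), 0, 1)    # x_offset=0 (rows), y_offset=1 (cols)
library = greycomatrix(gray, [1], [0], levels=256)[:, :, 0, 0]  # same offset via skimage
print(np.allclose(manual, library))  # expected: True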
Example #14
def main():
    parser = argparse.ArgumentParser(description="""
Construct a traditional machine learning data matrix by extracting features from the objects in images.
""")
    parser.add_argument('--plate',
                        '-p',
                        help="The plate the input image is associated with",
                        required=True)
    parser.add_argument('--well',
                        '-w',
                        help="The well the input image is associated with",
                        required=True)
    parser.add_argument(
        "--input-image",
        "-i",
        help=
        "Input image for which cell_clustering.py was run on to produce --label-image.",
        required=True)
    parser.add_argument(
        "--label-image",
        "-l",
        help=
        "Output of cell_clustering.py or other method for segmenting images. Must be a CSV of an array of the same shape as the input image and has an integer in each cell assigning a pixel to an object. -1 is used for background pixels. Objects start counting at 0.",
        required=True)
    parser.add_argument(
        "--treatments",
        "-t",
        help="Treatment metadata file output from parse_treatment.py")
    parser.add_argument(
        "--outfile",
        "-o",
        help=
        "Output CSV which is a traditional ML data matrix with shape n_objects x n_features. The objects correspond to the objects in the --infile. The features include region properties, texture properties, and which chemical treatment was used."
    )
    args = parser.parse_args()

    ofh = open(args.outfile, 'w')

    intensity_image = imread(args.input_image)
    label_image = np.genfromtxt(args.label_image,
                                delimiter=",").astype('int') + 1

    ofh.write('{}\n'.format(",".join(HEADER)))
    for i in range(np.max(label_image)):
        intensity_image_slice = np.copy(intensity_image)
        intensity_image_slice[label_image != i + 1] = 0

        label_image_bool = np.copy(label_image)
        label_image_bool[label_image != i + 1] = 0
        label_image_bool[label_image == i + 1] = 1

        prop_list = regionprops(label_image_bool, intensity_image_slice)
        prop = prop_list[0]
        # region properties - {{
        # extract simple scalar region properties
        scalar_region_props = list(
            map(lambda x: prop[x], SCALAR_REGION_PROPERTIES))

        # extract region properties that require some processing
        local_centroid_row = prop['local_centroid'][0]
        local_centroid_col = prop['local_centroid'][1]
        weighted_centroid_row = prop['weighted_centroid'][0]
        weighted_centroid_col = prop['weighted_centroid'][1]

        region_props = scalar_region_props + [
            local_centroid_row, local_centroid_col, weighted_centroid_row,
            weighted_centroid_col
        ]
        # }} - region properties

        # extract texture properties for the object i - {{

        # 2nd and 3rd parameters encode a 1-pixel offset to the right, up, left, and down
        # n_dists = 1, n_angle = 4
        levels = np.max(intensity_image_slice) + 1
        grey_rv = greycomatrix(intensity_image_slice, [1],
                               [0, np.pi / 2, np.pi, 3 * np.pi / 2],
                               levels=levels)
        # greycoprops is (n_dist x n_angle)
        # compute average of the texture property
        avg_texture_props = []
        for texture_prop in TEXTURE_PROPS:
            props_rv = greycoprops(grey_rv, texture_prop)
            avg_texture_props.append(np.mean(props_rv))
        # }} - texture properties

        # combine all properties
        all_props = [args.plate, args.well] + region_props + avg_texture_props
        ofh.write(','.join(map(str, all_props)) + '\n')