def run_one_gabor(self, image_name, object_name, scale, workspace):
    """Measure the per-object Gabor texture score for one image/object pair.

    For each object in `object_name`'s segmentation, filters the grayscale
    image with Gabor kernels at `self.gabor_angles.value` evenly spaced
    orientations and keeps the best (largest-magnitude) response per object.
    The result is recorded via `self.record_measurement` under F_GABOR.

    Parameters
    ----------
    image_name : name of the grayscale image to measure
    object_name : name of the segmented objects to measure within
    scale : Gabor kernel scale (passed through to `gabor`)
    workspace : pipeline workspace providing images and objects

    Returns
    -------
    whatever `self.record_measurement` returns (its statistics rows)
    """
    objects = workspace.get_objects(object_name)
    labels = objects.segmented
    object_count = np.max(labels)
    if object_count > 0:
        image = workspace.image_set.get_image(image_name, must_be_grayscale=True)
        pixel_data = image.pixel_data
        labels = objects.segmented
        if image.has_mask:
            mask = image.mask
        else:
            mask = None
        try:
            # Preferred path: crop the image (and mask) down to the objects'
            # shape; masked-out pixels are zeroed in the label matrix so they
            # do not contribute to any object's score.
            pixel_data = objects.crop_image_similarly(pixel_data)
            if mask is not None:
                mask = objects.crop_image_similarly(mask)
                labels[~mask] = 0
        except ValueError:
            # Fallback when cropping fails: pad/trim image and mask to the
            # label shape instead. `size_similarly` returns the resized array
            # plus a validity mask (m1/m2) marking pixels that had real data;
            # labels outside that region are zeroed as well.
            # NOTE(review): this mutates `labels` in place — presumably
            # `objects.segmented` returns a fresh copy each call; confirm.
            pixel_data, m1 = cpo.size_similarly(labels, pixel_data)
            labels[~m1] = 0
            if mask is not None:
                mask, m2 = cpo.size_similarly(labels, mask)
                labels[~m2] = 0
                labels[~mask] = 0
        pixel_data = normalized_per_object(pixel_data, labels)
        best_score = np.zeros((object_count,))
        for angle in range(self.gabor_angles.value):
            theta = np.pi * angle / self.gabor_angles.value
            g = gabor(pixel_data, labels, scale, theta)
            # Per-object sums of the real and imaginary filter responses;
            # `fix` wraps scipy.ndimage results into plain arrays.
            score_r = fix(scind.sum(g.real, labels,
                                    np.arange(object_count, dtype=np.int32) + 1))
            score_i = fix(scind.sum(g.imag, labels,
                                    np.arange(object_count, dtype=np.int32) + 1))
            # Magnitude of the complex response is the orientation's score.
            score = np.sqrt(score_r ** 2 + score_i ** 2)
            best_score = np.maximum(best_score, score)
    else:
        # No objects: record an empty measurement vector.
        best_score = np.zeros((0,))
    statistics = self.record_measurement(workspace, image_name, object_name,
                                         scale, F_GABOR, best_score)
    return statistics
def run_one_gabor(self, image_name, object_name, scale, workspace):
    """Record the best per-object Gabor response for one image/object pair.

    Sweeps `self.gabor_angles.value` orientations of a Gabor filter at the
    given `scale` and keeps, for every labeled object, the maximum response
    magnitude. The scores are stored through `self.record_measurement`
    under the F_GABOR feature name.
    """
    segmented_objects = workspace.get_objects(object_name)
    lbl = segmented_objects.segmented
    n_objects = np.max(lbl)
    if n_objects > 0:
        image = workspace.image_set.get_image(image_name, must_be_grayscale=True)
        intensity = image.pixel_data
        lbl = segmented_objects.segmented
        mask = image.mask if image.has_mask else None
        try:
            # Crop image (and mask) to the objects' frame; pixels outside
            # the mask are removed from the label matrix.
            intensity = segmented_objects.crop_image_similarly(intensity)
            if mask is not None:
                mask = segmented_objects.crop_image_similarly(mask)
                lbl[~mask] = 0
        except ValueError:
            # Cropping failed — pad/trim to the label shape instead and
            # zero labels wherever no valid data exists.
            intensity, valid1 = size_similarly(lbl, intensity)
            lbl[~valid1] = 0
            if mask is not None:
                mask, valid2 = size_similarly(lbl, mask)
                lbl[~valid2] = 0
                lbl[~mask] = 0
        intensity = normalized_per_object(intensity, lbl)
        object_indices = np.arange(n_objects, dtype=np.int32) + 1
        n_angles = self.gabor_angles.value
        best_score = np.zeros((n_objects,))
        for k in range(n_angles):
            theta = np.pi * k / n_angles
            response = gabor(intensity, lbl, scale, theta)
            real_part = fix(scind.sum(response.real, lbl, object_indices))
            imag_part = fix(scind.sum(response.imag, lbl, object_indices))
            best_score = np.maximum(best_score,
                                    np.sqrt(real_part ** 2 + imag_part ** 2))
    else:
        best_score = np.zeros((0,))
    return self.record_measurement(workspace, image_name, object_name,
                                   scale, F_GABOR, best_score)
def run_image_gabor(self, image_name, scale, workspace):
    """Record the whole-image Gabor score for one image at one scale.

    Treats the entire (masked) image as a single region, sweeps
    `self.gabor_angles.value` filter orientations, and records the largest
    response magnitude through `self.record_image_measurement` under the
    F_GABOR feature name.
    """
    img = workspace.image_set.get_image(image_name, must_be_grayscale=True)
    data = img.pixel_data
    # One label covering the whole image; masked-out pixels become background.
    foreground = np.ones(data.shape, int)
    if img.has_mask:
        foreground[~img.mask] = 0
    data = stretch(data, foreground > 0)
    n_angles = self.gabor_angles.value
    best = 0
    for k in range(n_angles):
        response = gabor(data, foreground, scale, np.pi * k / n_angles)
        magnitude = np.sqrt(np.sum(response.real) ** 2 + np.sum(response.imag) ** 2)
        best = max(best, magnitude)
    return self.record_image_measurement(workspace, image_name, scale,
                                         F_GABOR, best)
def run_image_gabor(self, image_name, scale, workspace):
    """Measure the whole-image Gabor texture score at one scale.

    The full image is treated as a single labeled region (mask pixels, if
    any, are excluded). The intensity is contrast-stretched over the region,
    then filtered with Gabor kernels at `self.gabor_angles.value` evenly
    spaced orientations; the best response magnitude across orientations is
    recorded via `self.record_image_measurement` under F_GABOR.

    Parameters
    ----------
    image_name : name of the grayscale image to measure
    scale : Gabor kernel scale (passed through to `gabor`)
    workspace : pipeline workspace providing the image set

    Returns
    -------
    whatever `self.record_image_measurement` returns (its statistics rows)
    """
    image = workspace.image_set.get_image(image_name, must_be_grayscale=True)
    pixel_data = image.pixel_data
    # Single label covering every pixel; masked pixels are zeroed out below.
    labels = np.ones(pixel_data.shape, int)
    if image.has_mask:
        labels[~image.mask] = 0
    pixel_data = stretch(pixel_data, labels > 0)
    best_score = 0
    for angle in range(self.gabor_angles.value):
        theta = np.pi * angle / self.gabor_angles.value
        g = gabor(pixel_data, labels, scale, theta)
        # Sum the complex response over the whole image and score by its
        # magnitude; keep the best orientation.
        score_r = np.sum(g.real)
        score_i = np.sum(g.imag)
        score = np.sqrt(score_r ** 2 + score_i ** 2)
        best_score = max(best_score, score)
    statistics = self.record_image_measurement(workspace, image_name, scale,
                                               F_GABOR, best_score)
    return statistics
def extract(self):
    '''Extracts texture features (Gabor, TAS, Hu moments, Local Binary
    Patterns and optionally Haralick) by filtering the intensity image of
    each object and computing one score vector per object.

    Returns
    -------
    pandas.DataFrame
        extracted feature values for each object in `label_image`

    Note
    ----
    Fixes applied relative to the previous revision:

    * Haralick features are now computed on the clipped, 8-bit rescaled
      image (`rescaled_img`) as the original comment intended — previously
      `clipped_img`/`rescaled_img` were computed but never used and the raw
      `img` was passed to `mh.features.haralick`.
    * `ndi.sum` with a scalar index replaces the removed
      `ndi.measurements.sum` with a length-1 index array, so each Gabor
      score is a plain float and `max` no longer builds a ragged array.
    * `np.nan` replaces `np.NAN`, which was removed in NumPy 2.0.
    '''
    logger.info('extract texture features')
    features = list()
    for obj in self.object_ids:
        mask = self.get_object_mask_image(obj)
        label = mask.astype(np.int32)
        img = self.get_object_intensity_image(obj)
        # Zero out background so it cannot contribute to any feature.
        img[~mask] = 0
        values = list()

        # Gabor: best response magnitude over all orientations, per frequency.
        logger.debug('extract Gabor features for object #%d', obj)
        for freq in self.frequencies:
            best_score = 0.0
            for angle in range(self.theta_range):
                theta = np.pi * angle / self.theta_range
                g = gabor(img, label, freq, theta)
                # Scalar index 1 selects the single object label, so the
                # sums are plain floats rather than length-1 arrays.
                score_r = ndi.sum(g.real, label, 1)
                score_i = ndi.sum(g.imag, label, 1)
                score = np.sqrt(score_r ** 2 + score_i ** 2)
                best_score = max(best_score, score)
            values.append(best_score)

        # Threshold Adjacency Statistics
        logger.debug('extract TAS features for object #%d', obj)
        tas_values = mh.features.pftas(img, T=self._threshold)
        values.extend(tas_values)

        # Hu moments (intensity-weighted)
        logger.debug('extract Hu moments for object #%d', obj)
        region = self.object_properties[obj]
        hu_values = region.weighted_moments_hu
        values.extend(hu_values)

        # Local Binary Pattern
        logger.debug('extract Local Binary Patterns for object #%d', obj)
        for r in self.radius:
            # We may want to use more points, but the number of features
            # increases exponentially with the number of neighbourhood
            # points.
            vals = mh.features.lbp(img, radius=r, points=8)
            values.extend(vals)

        if self.compute_haralick:
            # Haralick
            logger.debug('extract Haralick features for object #%d', obj)
            # NOTE: Haralick features are computed on 8-bit images.
            clipped_img = np.clip(img, 0, self._clip_value)
            rescaled_img = mh.stretch(clipped_img)
            haralick_values = mh.features.haralick(
                rescaled_img, ignore_zeros=False, return_mean=True
            )
            if not isinstance(haralick_values, np.ndarray):
                # NOTE: setting `ignore_zeros` to True creates problems for
                # some objects, when all values of the adjacency matrices
                # are zeros.
                # NOTE(review): the NaN fallback is sized with
                # `len(self.names)`, i.e. the full feature-name list, not
                # just the Haralick subset — looks suspicious; confirm
                # against `self.names`.
                haralick_values = np.empty((len(self.names), ), dtype=float)
                haralick_values[:] = np.nan
            values.extend(haralick_values)
        features.append(values)
    return pd.DataFrame(features, columns=self.names, index=self.object_ids)