def frequency_filter_images(self):
    r"""
    Returns a `list` containing one frequency-domain filter image per
    padded filter in ``self.fft_padded_filters``.

    :type: `list` of `menpo.image.Image`
    """
    filter_images = []
    for fft_padded_filter in self.fft_padded_filters:
        # Back to the spatial domain; imaginary part is numerical noise
        spatial_filter = np.real(ifft2(fft_padded_filter))
        # Crop the padded filter down to the patch size and flip both
        # spatial axes
        spatial_filter = crop(spatial_filter,
                              self.patch_shape)[:, ::-1, ::-1]
        # Magnitude spectrum with the zero-frequency component centred
        frequency_filter = np.abs(fftshift(fft2(spatial_filter)))
        filter_images.append(Image(frequency_filter))
    return filter_images
def frequency_filter_images(self):
    r"""
    Returns a `list` of `n_experts` filter images on the frequency domain.

    :type: `list` of `menpo.image.Image`
    """
    images = []
    for padded_fft in self.fft_padded_filters:
        # Recover the spatial-domain filter, crop it to the patch size
        # and flip both spatial axes
        kernel = crop(np.real(ifft2(padded_fft)),
                      self.patch_shape)[:, ::-1, ::-1]
        # Magnitude spectrum, zero-frequency centred
        images.append(Image(np.abs(fftshift(fft2(kernel)))))
    return images
def _train(self, images, shapes, prefix='', verbose=False, increment=False):
    r"""
    Trains (or incrementally updates) the ensemble of correlation filter
    experts — one expert per landmark point.

    Parameters
    ----------
    images : `list`
        Training images. Assumed to align one-to-one with `shapes` —
        TODO confirm against callers.
    shapes : `list`
        One shape per image; ``shapes[0].n_points`` fixes the number of
        experts.
    prefix : `str`, optional
        Prefix prepended to the progress message.
    verbose : `bool`, optional
        Whether to print training progress.
    increment : `bool`, optional
        If ``True``, the existing experts are updated with the new data
        using the stored auto/cross correlations; otherwise the ensemble
        is reset and trained from scratch.
    """
    # Define print_progress partial
    wrap = partial(print_progress,
                   prefix='{}Training experts'.format(prefix),
                   end_with_newline=not prefix,
                   verbose=verbose)

    # If increment is False, we need to initialise/reset the ensemble of
    # experts
    if not increment:
        self.fft_padded_filters = []
        self.auto_correlations = []
        self.cross_correlations = []
        # Set number of images
        self.n_images = len(images)
    else:
        # Update number of images
        # NOTE(review): updated *before* the loop, so the incremented
        # total is what gets passed to `self._icf.increment` below.
        self.n_images += len(images)

    # Obtain total number of experts
    n_experts = shapes[0].n_points

    # Train ensemble of correlation filter experts
    fft_padded_filters = []
    auto_correlations = []
    cross_correlations = []
    for i in wrap(range(n_experts)):
        patches = []
        for image, shape in zip(images, shapes):
            # Select the appropriate landmark
            landmark = PointCloud([shape.points[i]])
            # Extract patch
            patch = self._extract_patch(image, landmark)
            # Add patch to the list
            patches.append(patch)

        if increment:
            # Increment correlation filter using the previously stored
            # correlations for expert `i`
            correlation_filter, auto_correlation, cross_correlation = (
                self._icf.increment(self.auto_correlations[i],
                                    self.cross_correlations[i],
                                    self.n_images,
                                    patches,
                                    self.response))
        else:
            # Train correlation filter from scratch
            correlation_filter, auto_correlation, cross_correlation = (
                self._icf.train(patches, self.response))

        # Pad filter with zeros
        padded_filter = pad(correlation_filter, self.padded_size)
        # Compute fft of padded filter
        fft_padded_filter = fft2(padded_filter)
        # Add fft padded filter to list
        fft_padded_filters.append(fft_padded_filter)
        auto_correlations.append(auto_correlation)
        cross_correlations.append(cross_correlation)

    # Turn list into ndarray
    self.fft_padded_filters = np.asarray(fft_padded_filters)
    self.auto_correlations = np.asarray(auto_correlations)
    self.cross_correlations = np.asarray(cross_correlations)
def _train(self, images, shapes, prefix='', verbose=False, increment=False):
    r"""
    Trains (or incrementally updates) the ensemble of correlation filter
    experts, one per landmark point.

    Parameters
    ----------
    images : `list`
        Training images, aligned one-to-one with `shapes`.
    shapes : `list`
        One shape per image; ``shapes[0].n_points`` determines the
        number of experts.
    prefix : `str`, optional
        Prefix for the progress message.
    verbose : `bool`, optional
        Whether to print training progress.
    increment : `bool`, optional
        If ``True``, update the existing experts with the new data;
        otherwise reset the ensemble and train from scratch.
    """
    # Progress-reporting wrapper for the experts loop
    progress = partial(print_progress,
                       prefix='{}Training experts'.format(prefix),
                       end_with_newline=not prefix,
                       verbose=verbose)

    if increment:
        # Account for the newly supplied images; the incremented total
        # is what gets passed to the incremental trainer below
        self.n_images += len(images)
    else:
        # Starting from scratch: drop any previously trained experts
        self.fft_padded_filters = []
        self.auto_correlations = []
        self.cross_correlations = []
        self.n_images = len(images)

    # One expert per landmark point
    n_experts = shapes[0].n_points

    new_fft_filters = []
    new_autos = []
    new_crosses = []
    for idx in progress(range(n_experts)):
        # Gather the patch centred on landmark `idx` from every image
        patches = [self._extract_patch(image,
                                       PointCloud([shape.points[idx]]))
                   for image, shape in zip(images, shapes)]

        if increment:
            # Update expert `idx` using its stored correlations
            cf, auto, cross = self._icf.increment(
                self.auto_correlations[idx],
                self.cross_correlations[idx],
                self.n_images, patches, self.response)
        else:
            # Train expert `idx` from scratch
            cf, auto, cross = self._icf.train(patches, self.response)

        # Zero-pad the filter and move it to the frequency domain
        new_fft_filters.append(fft2(pad(cf, self.padded_size)))
        new_autos.append(auto)
        new_crosses.append(cross)

    # Store results as ndarrays
    self.fft_padded_filters = np.asarray(new_fft_filters)
    self.auto_correlations = np.asarray(new_autos)
    self.cross_correlations = np.asarray(new_crosses)