def test_particle_feature_learner():
    """Run ParticleFeatureLearner over a small ALOI subset (10 categories,
    1 image each) with half-image (72x96) grid windows and a 0.90 agreement
    threshold. Intermediate objects are stashed in module globals so they
    can be poked at interactively after the run.
    """
    from and_or_images.tools.image_iterators.aloi_iterator import AloiIterator
    from and_or_images.structures.gabor_region import GaborRegion
    from and_or_images.algorithms.window_samplers.grid_window_sampler import GridWindowSampler

    num_categories = 10
    images_per_cat = 1
    image_shape = (144, 192)
    window_shape = (72, 96)

    global image_iter
    image_iter = AloiIterator(num_categories, images_per_cat)

    gabor_region = GaborRegion(image_shape, rotations=3,
                               initial_wavelength=3, num_wavelengths=2)
    sampler = GridWindowSampler(image_shape, window_shape)

    global gabor_acts
    gabor_acts = gabor_region.precompute_image_activations(image_iter)
    # Discard the category labels -- the learner consumes activations only.
    activations = [act for act, img_idx in gabor_acts]

    global fl
    fl = ParticleFeatureLearner(gabor_region, sampler, agreement_percent=0.90)
    feature_assignments, features = fl.compute_features(activations)
def test_greedy_feature_learner():
    """Run GreedyFeatureLearner over ALOI (500 categories, 1 image each)
    with a 0.35 agreement threshold and analysis output enabled. The
    learner, iterator, and activations are kept in module globals for
    post-run inspection.
    """
    from and_or_images.tools.image_iterators.aloi_iterator import AloiIterator
    from and_or_images.structures.gabor_region import GaborRegion

    global fl
    fl = GreedyFeatureLearner()

    num_categories = 500
    images_per_cat = 1
    image_shape = (144, 192)

    global image_iter
    image_iter = AloiIterator(num_categories, images_per_cat)

    gabor_region = GaborRegion(image_shape, rotations=3,
                               initial_wavelength=3, num_wavelengths=2)

    global gabor_acts
    gabor_acts = gabor_region.precompute_image_activations(image_iter)
    # Discard the category labels -- the learner consumes activations only.
    activations = [act for act, img_idx in gabor_acts]

    feature_assignments, features = fl.compute_features(
        activations, agreement_threshold=0.35, analyze=True)
def train(self):
    """ Store a copy of every image in the iterator. """
    # Delegate base-class setup (unseen Experiment base) before timing starts.
    Experiment.train(self)
    start = time.time()
    self.image_shape = (144, 192)
    # Gabor front-end: 3 rotations x 2 wavelengths starting at wavelength 3.
    gabor = GaborRegion(self.image_shape, rotations=3, initial_wavelength=3, num_wavelengths=2)
    # Regions = [ GaborRegion, AndRegion, OrRegion (classifier) ]
    self.network = AndOrNetwork((144,192), num_regions=2, input_region = gabor)
    f1 = self.network.regions[1]
    f2 = self.network.regions[2]
    classifier = self.network.get_classifier()
    # Precompute Gabor activations for every image in the training iterator.
    # NOTE(review): this reads self.image_iter, but test() below reads
    # self.image_iterator -- confirm which attribute the Experiment base
    # actually sets; one of the two looks stale.
    self.gabor_acts = gabor.precompute_image_activations(self.image_iter)
    windows = self.get_windows()
    for window in windows:
        # NOTE(review): loop body ignores `window` and passes the constant 1
        # each iteration -- verify this is intentional and not a lost
        # per-window training step.
        self.network.prepare_for_inference(1)
    elapsed = (time.time() - start)
    # Report connection counts per learned region (input Gabor region excluded).
    total_cxns = 0
    for i, r in enumerate(self.network.regions[1:]):
        num_cxns = r.get_num_cxns()
        print "Region %s cxns: %s" % (i, num_cxns)
        total_cxns += num_cxns
    print "Total connections:", total_cxns
    print "Training time:", elapsed
    # NOTE(review): `i` here is the last region index from the loop above,
    # not a category count -- "Time per category" is almost certainly
    # dividing by the wrong quantity; confirm intent.
    print "Time per category:", (elapsed / i)
    print colored("Training complete", "green")

def test(self):
    """ Test that every image is correctly recognized. """
    # Delegate base-class setup before timing starts.
    Experiment.test(self)
    start = time.time()
    classifier = self.network.get_classifier()
    i = 0
    # Run inference on every image; count iterations in `i` for progress
    # reporting and the per-category timing below.
    while self.image_iterator.has_next():
        image, category = self.image_iterator.next()
        recognized = self.network.do_inference(numpy.array(image), category)
        if not recognized:
            # On a miss, show which categories the classifier DID activate.
            active_cats = classifier.get_active_categories()
            print colored("Failed: " + category + " recognized as "+repr(active_cats), 'red')
        i += 1
        if i % self.PRINT_INCR == 0:
            print "Iter:", i
    elapsed = (time.time() - start)
    print "Testing time:", elapsed
    # NOTE(review): raises ZeroDivisionError if the iterator was empty
    # (i stays 0) -- confirm callers guarantee at least one image.
    print "Time per category:", (elapsed / i)
    print colored("Testing complete", "green")
def test_particle_feature_learner():
    """Run ParticleFeatureLearner over ALOI (10 categories, 10 images each)
    with a non-overlapping 12x12 window grid (12 x 16 = 192 windows) and a
    0.40 agreement threshold. Globals hold intermediates for interactive
    inspection. NOTE: this def shadows the earlier function of the same name.
    """
    from and_or_images.tools.image_iterators.aloi_iterator import AloiIterator
    from and_or_images.structures.gabor_region import GaborRegion
    from and_or_images.algorithms.window_samplers.grid_window_sampler import GridWindowSampler

    num_categories = 10
    images_per_cat = 10
    image_shape = (144, 192)

    # Grid-size options tried previously (window shape -> resulting grid):
    #   (72, 96) -> 2x2    (36, 48) -> 4x4    (18, 24) -> 8x8   (9, 12) -> 16x16
    #   (48, 48) -> 3x4    (24, 24) -> 6x8    (6, 6)   -> 24x32 = 768
    window_shape = (12, 12)  # 12 x 16 = 192

    global image_iter
    image_iter = AloiIterator(num_categories, images_per_cat)

    gabor_region = GaborRegion(image_shape, rotations=3,
                               initial_wavelength=3, num_wavelengths=2)
    sampler = GridWindowSampler(image_shape, window_shape, overlap=False)

    global gabor_acts
    gabor_acts = gabor_region.precompute_image_activations(image_iter)
    # Discard the category labels -- the learner consumes activations only.
    activations = [act for act, img_idx in gabor_acts]

    global fl
    fl = ParticleFeatureLearner(gabor_region, sampler, agreement_percent=0.40)
    feature_assignments, features = fl.compute_features(activations)