Code example #1
File: brute_force.py  Project: kkansky/and_or_images
 def train(self):
   """ Store a copy of every image in the iterator. """
   # Assumes module-level imports: time, numpy, termcolor.colored, plus
   # Experiment, GaborRegion, and AndOrNetwork from this project.
   Experiment.train(self)
   start = time.time()
   
   gabor = GaborRegion((144, 192), rotations=3, 
                       initial_wavelength=3, 
                       num_wavelengths=2)
   
   # Regions = [ GaborRegion, AndRegion, OrRegion (classifier) ]
   self.network = AndOrNetwork((144,192), num_regions=1, 
                               input_region = gabor)
   and_region = self.network.regions[1]
   classifier = self.network.get_classifier()
   
   i = 0
   while self.image_iterator.has_next():
     image, category, img_idx = self.image_iterator.next()
     gabor.do_inference(numpy.array(image))
     active_nodes = gabor.get_active_nodes()
     pos = and_region.create_node((0,0), cxns = active_nodes)
     classifier.create_node(category, pos)
     i += 1
     if i % self.PRINT_INCR == 0: print "Iter:", i
     
   and_region.prepare_for_inference()
   classifier.prepare_for_inference()
   
   num_cxns = and_region.get_num_cxns() + classifier.get_num_cxns()
   print "Number of connections:", num_cxns
   elapsed = (time.time() - start)
   print "Training time:", elapsed
   print "Time per category:", (elapsed / i)
   print colored("Training complete", "green")
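
For intuition, the train() above stores one AND node per training image (that image's full set of active Gabor nodes) and links it to a category in the OR classifier. A minimal sketch of that store-everything scheme using plain Python sets; all names here are hypothetical stand-ins, not the repo's AndRegion/OrRegion API:

def train_templates(examples):
    """Store every training image's active-feature set verbatim."""
    return [(frozenset(active_nodes), category)
            for active_nodes, category in examples]

def classify(templates, active_nodes):
    """Pick the category whose stored template shares the most features."""
    query = set(active_nodes)
    features, category = max(templates, key=lambda t: len(t[0] & query))
    return category

templates = train_templates([([1, 5, 9], "mug"), ([2, 5, 7], "shoe")])
print(classify(templates, [1, 9]))  # -> mug
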
Code example #2
def test_particle_feature_learner():
  from and_or_images.tools.image_iterators.aloi_iterator import AloiIterator
  from and_or_images.structures.gabor_region import GaborRegion
  from and_or_images.algorithms.window_samplers.grid_window_sampler import GridWindowSampler
  # ParticleFeatureLearner is assumed to be imported at module level
  
  num_cats = 10
  images_per_category = 1
  image_shape = (144,192)
  feature_shape = (72, 96)
  
  global image_iter
  image_iter = AloiIterator(num_cats, images_per_category)
  
  gabor = GaborRegion(image_shape, rotations=3, 
                      initial_wavelength=3, 
                      num_wavelengths=2)
  
  win_sampler = GridWindowSampler(image_shape, feature_shape)
  
  global gabor_acts
  gabor_acts = gabor.precompute_image_activations(image_iter)
  # keep only the activation maps, dropping the image identifiers
  acts = [act for act, img_idx in gabor_acts]
  
  global fl
  fl = ParticleFeatureLearner(gabor, win_sampler, agreement_percent = 0.90)
  feature_assignments, features = fl.compute_features(acts)
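
GridWindowSampler presumably carves the (144, 192) image into non-overlapping (72, 96) windows, a 2 x 2 grid here. A rough sketch of that tiling (an assumed reading of the class, not its actual implementation):

import numpy

def grid_windows(image_shape, feature_shape):
    """Yield (row_slice, col_slice) pairs tiling the image without overlap."""
    rows, cols = image_shape
    frows, fcols = feature_shape
    for r in range(0, rows - frows + 1, frows):
        for c in range(0, cols - fcols + 1, fcols):
            yield (slice(r, r + frows), slice(c, c + fcols))

acts = numpy.zeros((144, 192))
windows = list(grid_windows((144, 192), (72, 96)))
print(len(windows))       # -> 4, one window per cell of the 2 x 2 grid
patch = acts[windows[0]]  # activations inside the top-left window
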
Code example #3
def test_greedy_feature_learner():
  from and_or_images.tools.image_iterators.aloi_iterator import AloiIterator
  from and_or_images.structures.gabor_region import GaborRegion
  # GreedyFeatureLearner is assumed to be imported at module level
  
  global fl
  fl = GreedyFeatureLearner()
  
  num_cats = 500
  images_per_category = 1
  image_shape = (144,192)
  
  global image_iter
  image_iter = AloiIterator(num_cats, images_per_category)
  
  gabor = GaborRegion(image_shape, rotations=3, 
                      initial_wavelength=3, 
                      num_wavelengths=2)
  
  global gabor_acts
  gabor_acts = gabor.precompute_image_activations(image_iter)
  # keep only the activation maps, dropping the image identifiers
  acts = [act for act, img_idx in gabor_acts]
  
  feature_assignments, features = fl.compute_features(acts, 
                                                      agreement_threshold = 0.35,
                                                      analyze=True)
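
One plausible reading of agreement_threshold is that a candidate feature (a set of co-active Gabor nodes) keeps absorbing images only while the surviving intersection retains at least that fraction of the seed image's activations. A hedged sketch of such a greedy intersection; this illustrates the idea only and is not the repo's GreedyFeatureLearner:

def greedy_feature(act_sets, agreement_threshold):
    """Greedily intersect activation sets while agreement stays high enough."""
    seed = set(act_sets[0])
    feature = set(seed)
    members = [0]
    for i, acts in enumerate(act_sets[1:], start=1):
        candidate = feature & set(acts)
        # skip images that would shrink the feature below the threshold
        if len(candidate) < agreement_threshold * len(seed):
            continue
        feature = candidate
        members.append(i)
    return feature, members

feature, members = greedy_feature([{1, 2, 3, 4}, {2, 3, 4, 9}, {7, 8}], 0.35)
print(feature)  # -> {2, 3, 4}: the core shared by images 0 and 1
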
Code example #4
File: window_grid.py  Project: kkansky/and_or_images
 def train(self):
   """ Store a copy of every image in the iterator. """
   # Assumes module-level imports: time, numpy, termcolor.colored, plus
   # Experiment, GaborRegion, and AndOrNetwork from this project.
   Experiment.train(self)
   start = time.time()
   
   self.image_shape = (144, 192)
   
   gabor = GaborRegion(self.image_shape, rotations=3, 
                       initial_wavelength=3, 
                       num_wavelengths=2)
   
   # Regions = [ GaborRegion, two feature AndRegions, OrRegion (classifier) ]
   self.network = AndOrNetwork(self.image_shape, num_regions=2, 
                               input_region = gabor)
   f1 = self.network.regions[1]
   f2 = self.network.regions[2]
   classifier = self.network.get_classifier()
   
   self.gabor_acts = gabor.precompute_image_activations(self.image_iterator)
   windows = self.get_windows()
   
   for window in windows:
     pass  # per-window training body is elided in the source
   
   self.network.prepare_for_inference(1)
   elapsed = (time.time() - start)
   
   total_cxns = 0
   for i, r in enumerate(self.network.regions[1:], start=1):
     num_cxns = r.get_num_cxns()
     print "Region %s cxns: %s" % (i, num_cxns)
     total_cxns += num_cxns
   
   print "Total connections:", total_cxns
   print "Training time:", elapsed
   # NOTE: unlike brute_force.py, 'i' here is the last region index from the
   # loop above rather than an image count, so this figure is not meaningful
   print "Time per category:", (elapsed / i)
   print colored("Training complete", "green")
     
 def test(self):
   """ Test that every image is correctly recognized. """
   Experiment.test(self)
   start = time.time()
   
   classifier = self.network.get_classifier()
   i = 0
   while self.image_iterator.has_next():
     # iterators in this project yield (image, category, img_idx) triples
     image, category, img_idx = self.image_iterator.next()
     recognized = self.network.do_inference(numpy.array(image), category)
     if not recognized:
       active_cats = classifier.get_active_categories()
       print colored("Failed: %s recognized as %r" % (category, active_cats), 'red')
     i += 1
     if i % self.PRINT_INCR == 0: print "Iter:", i
   
   elapsed = (time.time() - start)
   print "Testing time:", elapsed
   print "Time per category:", (elapsed / i)
   print colored("Testing complete", "green")
Code example #5
def test_particle_feature_learner():
  from and_or_images.tools.image_iterators.aloi_iterator import AloiIterator
  from and_or_images.structures.gabor_region import GaborRegion
  from and_or_images.algorithms.window_samplers.grid_window_sampler import GridWindowSampler
  # ParticleFeatureLearner is assumed to be imported at module level
  
  num_cats = 10
  images_per_category = 10
  image_shape = (144,192)
  
#  feature_shape = (72, 96) # 2 x 2
#  feature_shape = (36, 48) # 4 x 4
#  feature_shape = (18, 24) # 8 x 8
#  feature_shape = (9, 12) # 16 x 16
#  
#  feature_shape = (48, 48) # 3 x 4
#  feature_shape = (24, 24) # 6 x 8
  feature_shape = (12, 12) # 12 x 16 = 192
#  feature_shape = (6, 6) # 24 x 32 = 768
  
  global image_iter
  image_iter = AloiIterator(num_cats, images_per_category)
  
  gabor = GaborRegion(image_shape, rotations=3, 
                      initial_wavelength=3, 
                      num_wavelengths=2)
  
  win_sampler = GridWindowSampler(image_shape, feature_shape, overlap=False)
  
  global gabor_acts
  gabor_acts = gabor.precompute_image_activations(image_iter)
  # keep only the activation maps, dropping the image identifiers
  acts = [act for act, img_idx in gabor_acts]
  
  global fl
  fl = ParticleFeatureLearner(gabor, win_sampler, agreement_percent = 0.40)
  feature_assignments, features = fl.compute_features(acts)
Code example #6
  def run(self):
    # Assumes module-level imports: numpy, matplotlib.pyplot as plt,
    # termcolor.colored, plus ImageListIterator, AloiIterator, and
    # GaborRegion from this project.
    iterator = "Aloi"
    
    if iterator == 'Bikes':
      num_cats = 365
      image_shape = (480, 640)
      images_per_category = 1
      image_iter = ImageListIterator("bikes", num_cats)
      
    elif iterator == 'Flowers':
      num_cats = 500
      image_shape = (500, 500)
      images_per_category = 1
      image_iter = ImageListIterator("flowers", num_cats, randomize=True)
    
    elif iterator == 'Aloi':
      num_cats = 500
      images_per_category = 1
      image_shape = (144, 192)
      image_iter = AloiIterator(num_cats, images_per_category)
      
    self.pickle_name = "%s-%s-%s.pkl" % (iterator, num_cats, images_per_category)
    
    print "Creating gabor region..."
    gabor = GaborRegion(image_shape, rotations=3, 
                        initial_wavelength=3, 
                        num_wavelengths=2 )
    
    
    # precompute all gabor activations
    print colored("Computing gabor activations...", 'green')
    images = []
    i = 0
    while image_iter.has_next():
      image, category, img_idx = image_iter.next()
      gabor.do_inference(numpy.array(image))
      active = gabor.node_values.copy()
      images.append((active, active.sum(), (category, img_idx)))
      i += 1
      if i%10==0: print i

    print colored("Computing overlap...", 'green')
    self.num_merges = num_merges = 10
    
    overlap_results = [[] for i in range(num_merges)]
    cxn_results = [[] for i in range(num_merges)]
    i = 0
    for active, size, idx in images:
      merged_idxs = [idx]
      cum_size = size
      
      for merge_idx in range(num_merges):
        max_overlap = 0
        max_percent = 0
        max_merge = None
        max_cidx = None
        max_size = None
        num_parents = merge_idx + 2
        
        for comp_active, csize, cidx in images:
          if cidx in merged_idxs: continue
          
          merge = numpy.logical_and(active, comp_active)
          overlap = merge.sum()
          if overlap > max_overlap:
            max_overlap = overlap
            max_merge = merge
            max_cidx = cidx
            max_size = csize
        
        active = max_merge
        cum_size += max_size
        merged_idxs.append(max_cidx)
        overlap_results[merge_idx].append(max_overlap)
        
        cxn_results[merge_idx].append(max_overlap * (num_parents-1) - num_parents)
        
      i += 1
      if i%10==0: print i
    
    fig = plt.figure(1)
    # NOTE: the figure is 5 x 10 inches (portrait) but the two subplots are
    # laid out side by side; plt.subplot(2, 1, ...) may have been intended
    fig.set_size_inches(5, 10)
    plt.subplot(1,2,1)
    
    # zip() truncates to len(colors), so only the first four of the
    # num_merges merge levels are actually plotted below
    colors = '#008800 #880000 #000088 #888800'.split()
    
    start, end = 1000000, 0
    for color, merge_idx in zip(colors, range(num_merges)):
      start = min(start, min(overlap_results[merge_idx]))
      end = max(end, max(overlap_results[merge_idx]))
      
    for color, merge_idx in zip(colors, reversed(range(num_merges))):
      plt.hist(overlap_results[merge_idx], 20, range=(start, end), 
               facecolor=color, alpha=0.75)
      
    plt.xlabel("# shared features")
    plt.ylabel("# occurrences")
    plt.title("Component sharing\n%s categories, %s images." % \
              (num_cats, images_per_category))
    plt.grid(True)
    
    
    plt.subplot(1,2,2)
    start, end = 1000000, 0
    for color, merge_idx in zip(colors, range(num_merges)):
      start = min(start, min(cxn_results[merge_idx]))
      end = max(end, max(cxn_results[merge_idx]))
      
    for color, merge_idx in zip(colors, reversed(range(num_merges))):
      # use a float step: Python 2 integer division could yield a step of 0
      plt.hist(cxn_results[merge_idx], numpy.arange(start, end, (end - start) / 20.0), 
               facecolor=color, alpha=0.75)
      
    plt.xlabel("# fewer connections")
    plt.ylabel("# occurrences")
    plt.title("Component sharing\n%s categories, %s images." % \
              (num_cats, images_per_category))
    plt.grid(True)
      
    plt.show()
     
    print colored("Analysis complete", "green")
    
    self.cxn_results = cxn_results
    self.overlap_results = overlap_results
    
    self.save_results()
    self.report()
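
The inner search above, finding the stored image whose activation map overlaps the current merge the most, is the core of the analysis. A stripped-down version of just that step (best_merge and pool are local illustration names, not the repo's API):

import numpy

def best_merge(active, images, merged_idxs):
    """Return (overlap, merged_map, idx) for the best-overlapping image."""
    best = (0, None, None)
    for comp_active, idx in images:
        if idx in merged_idxs:
            continue
        merge = numpy.logical_and(active, comp_active)
        overlap = int(merge.sum())
        if overlap > best[0]:
            best = (overlap, merge, idx)
    return best

a = numpy.array([1, 1, 0, 1], dtype=bool)
pool = [(numpy.array([1, 0, 0, 1], dtype=bool), "img0"),
        (numpy.array([0, 1, 1, 0], dtype=bool), "img1")]
overlap, merged, idx = best_merge(a, pool, [])
print((overlap, idx))  # -> (2, 'img0'): img0 shares two active features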