def get(self):
    """Render the confusion-matrix cell page for one (row, col) cell.

    Reads 'row' (predicted label index) and 'col' (actual label index) from
    the request arguments, fetches the top regions recorded for that cell,
    batch-loads their source images, renders an SVG overlay per region, and
    renders the 'confusion_matrix_cell.html' template.
    """
    # parse arguments
    row = int(self.get_argument('row'))
    col = int(self.get_argument('col'))
    result = self.app.result
    predicted_label = result.label_names[row]
    actual_label = result.label_names[col]
    # get list of vertices in that cell
    region_label_data = result.confusion_matrix_item_freq.rows[row].cells[col].items
    region_label_data = region_label_data[0:150]  # limit to top 150 regions
    region_indices = [region.item_id for region in region_label_data]
    regions_dict = self.app.irg_reader.BatchGetRegions(region_indices)
    image_ids = [region.image_id for region in regions_dict.itervalues()]
    # lazily create the shared image loader on first use (PEP 8: 'is None')
    if self.app.image_loader is None:
        LOG(INFO, 'loading images...')
        self.app.image_loader = visutil.BatchImageLoader(self.app.images_uri)
    self.imageid_to_jpeg = self.app.image_loader.BatchGetImages(image_ids)
    regionid_to_svg = {}
    for region_id, region in regions_dict.iteritems():
        regionid_to_svg[region_id] = self.RenderRegionSvg(region)
    params = {
        'actual_label': actual_label,
        'predicted_label': predicted_label,
        'region_label_data': region_label_data,
        'regions_dict': regions_dict,
        'regionid_to_svg': regionid_to_svg,
        'labelid_to_labelnames': result.label_names,
    }
    self.render('confusion_matrix_cell.html', **params)
    return
def __init__(self): dataset_name = 'tide_v08_distractors' base_uri = 'local://home/ubuntu/Desktop/vol-7f209e0c/itergraph/%s' % dataset_name self.images_uri = '%s/cropped_scaled_photoid_to_image.pert' % ( base_uri) self.matches_uri = '%s/usift/cbir/9e6c60d825c5a9814e19c0a735747011/itergraph/0ffd08c74629993310fd73f6461673af/merged_matches.pert' % ( base_uri) self.irg_uri = '%s/usift/cbir/9e6c60d825c5a9814e19c0a735747011/itergraph/0ffd08c74629993310fd73f6461673af/image_region_graph.pert' % ( base_uri) self.tide_uri = '%s/objectid_to_object.pert' % (base_uri) self.graph = graph.GetCachedGraph(self.irg_uri, self.tide_uri) self.image_loader = visutil.BatchImageLoader(self.images_uri) self.graph_render = renderers.GraphNeighborhoodRenderer(self.graph) print 'done loading' self.application = tornado.web.Application( [ (r"/", MainHandler, dict(app=self)), (r"/graph", GraphHandler, dict(app=self)), #(r"/vertex", VertexHandler, dict(app=self)), #(r"/edge", EdgeHandler, dict(app=self)), (r"/image", ImageHandler, dict(app=self)), ], debug=True) return
def __init__(self): dataset_name = 'tide_v08_distractors' base_uri = 'local://home/ubuntu/Desktop/vol-7f209e0c/itergraph/%s' % dataset_name self.images_uri = '%s/cropped_scaled_photoid_to_image.pert' % ( base_uri) self.matches_uri = '%s/usift/cbir/9e6c60d825c5a9814e19c0a735747011/itergraph/0ffd08c74629993310fd73f6461673af/merged_matches.pert' % ( base_uri) self.image_graph_uri = '%s/usift/cbir/9e6c60d825c5a9814e19c0a735747011/itergraph/0ffd08c74629993310fd73f6461673af/image_graph.pert' % ( base_uri) self.tide_uri = '%s/objectid_to_object.pert' % (base_uri) # dataset_name = 'tide_v08' # base_uri = 'local://media/vol-0449ca74/itergraph/%s' % dataset_name # self.images_uri = '%s/cropped_scaled_photoid_to_image.pert' % (base_uri) # self.matches_uri = '%s/usift/cbir/d27df409ad95e12823feed0c658eabeb/itergraph/1b8ba7a00a9d1cd558716ce882e8408f/merged_matches.pert' % (base_uri) # self.image_graph_uri = '%s/usift/cbir/d27df409ad95e12823feed0c658eabeb/itergraph/1b8ba7a00a9d1cd558716ce882e8408f/image_graph.pert' % (base_uri) # self.tide_uri = '%s/objectid_to_object.pert' % (base_uri) # dataset_name = 'tide_v14_mixed_v2' # base_uri = 'local://media/vol-0449ca74/itergraph/%s' % dataset_name # self.images_uri = '%s/photoid_to_image.pert' % (base_uri) # self.matches_uri = '%s/usift/cbir/654c8f59fd938958c1c739fd65949dad/itergraph/e4baab98c10a434d90d092c71ecb566c/merged_matches.pert' % (base_uri) # self.image_graph_uri = '%s/usift/cbir/654c8f59fd938958c1c739fd65949dad/itergraph/e4baab98c10a434d90d092c71ecb566c/image_graph.pert' % (base_uri) # self.tide_uri = '%s/objectid_to_object.pert' % (base_uri) self.image_graph = tide_image_graph.GetCachedTideImageGraph( self.image_graph_uri, self.tide_uri) self.image_loader = visutil.BatchImageLoader(self.images_uri) self.graph_render = renderers.TideImageGraphRenderer(self.image_graph) self.match_renderer = renderers.MatchRenderer(self.matches_uri, self.images_uri) print 'done loading' self.application = tornado.web.Application([ (r"/", 
MainHandler, dict(app=self)), (r"/neighborhood", NeighborhoodGraphHandler, dict(app=self)), (r"/objectsubgraph", ObjectSubgraphHandler, dict(app=self)), (r"/match", MatchHandler, dict(app=self)), (r"/image", ImageHandler, dict(app=self)), ], debug=True) return
def GetImageLoader(self):
    """Return the shared BatchImageLoader, creating it lazily on first use.

    Returns:
      The cached visutil.BatchImageLoader for self.images_uri.
    """
    # PEP 8: compare against None with 'is', not '=='
    if self.image_loader is None:
        LOG(INFO, 'loading images...')
        self.image_loader = visutil.BatchImageLoader(self.images_uri)
    return self.image_loader
def main():
    """Build the 'mixed' tide dataset.

    Pairs up NONE-labeled images from the two source tide objects (or, when
    unpaired, an image plus a landscape-orientation distractor), composites
    each pair into one mixed jpeg, and writes the relabeled objects and the
    mixed images out as new pert tables.
    """
    # Earlier dataset configurations, kept for reference:
    # images_uri = 'local://media/vol-0449ca74/itergraph/tide_v14/cropped_scaled_photoid_to_image_randomaccess.pert'
    # tide_uri = 'local://media/vol-0449ca74/itergraph/tide_v14/objectid_to_object.pert'
    # distractor_images_uri = 'local://media/vol-0449ca74/oxc1_100k/photoid_to_image.pert'
    # output_base_uri = 'local://media/vol-0449ca74/itergraph/tide_v14_mixed_v2/'

    # images_uri = 'local://media/vol-0449ca74/itergraph/tide_v16/photoid_to_image.pert'
    # tide_uri = 'local://media/vol-0449ca74/itergraph/tide_v16/objectid_to_object.pert'
    # distractor_images_uri = 'local://media/vol-0449ca74/oxc1_100k/photoid_to_image.pert'
    # output_base_uri = 'local://media/vol-0449ca74/itergraph/tide_v16_mixed/'

    images_uri = 'local://media/vol-0449ca74/itergraph/tide_v18/photoid_to_image.pert'
    tide_uri = 'local://media/vol-0449ca74/itergraph/tide_v18/objectid_to_object.pert'
    distractor_images_uri = 'local://media/vol-0449ca74/oxc1_100k/photoid_to_image.pert'
    output_base_uri = 'local://media/vol-0449ca74/itergraph/tide_v18_mixed/'
    output_tide_uri = '%s/objectid_to_object.pert' % (output_base_uri)
    output_images_uri = '%s/photoid_to_image.pert' % (output_base_uri)

    image_loader = visutil.BatchImageLoader(images_uri)
    tide_objects = OpenTideDataset(tide_uri)
    distractor_image_loader = DistractorImageLoader(distractor_images_uri)
    # This tool only supports exactly two source objects.
    CHECK_EQ(len(tide_objects), 2)
    object_a = tide_objects[0]
    object_b = tide_objects[1]
    new_object_a = InitNewObject(object_a)
    new_object_b = InitNewObject(object_b)
    a_none_image_ids = [photo.id for photo in object_a.photos
                        if photo.label == tide_pb2.NONE]
    b_none_image_ids = [photo.id for photo in object_b.photos
                        if photo.label == tide_pb2.NONE]
    # Pair up NONE images from a and b, keyed by the image id from object a.
    mixed_aux_images = {}
    for imageid_a, imageid_b in zip(a_none_image_ids, b_none_image_ids):
        mixed_aux_images[imageid_a] = (imageid_a, imageid_b)
    mixed_image_ids = mixed_aux_images.keys()
    # Split the mixed ids evenly between the two new objects.
    n = len(mixed_image_ids) // 2
    InitNoneLabels(new_object_a, mixed_image_ids[0:n])
    # BUG FIX: was mixed_image_ids[n:-1], which silently dropped the last
    # mixed image id so it never received a NONE label.
    InitNoneLabels(new_object_b, mixed_image_ids[n:])

    new_objects = [new_object_a, new_object_b]
    image_ids = []
    for obj in new_objects:
        for photo in obj.photos:
            image_ids.append(photo.id)
    image_ids.sort()

    # write new tide pert
    tide_writer = py_pert.ProtoTableWriter()
    tide_writer.Open(tide_pb2.Object(), output_tide_uri, 1)
    for obj in new_objects:
        tide_writer.Add(iwutil.Uint64ToKey(obj.id), obj.SerializeToString())
    tide_writer.Close()

    # write new image pert
    try:
        image_writer = py_pert.ProtoTableWriter()
        image_writer.Open(iw_pb2.JpegImage(), output_images_uri, 1)
        used_image_ids = set()
        progress = iwutil.MakeProgressBar(len(image_ids))
        for i, image_id in enumerate(image_ids):
            jpeg = None
            if image_id in mixed_aux_images:
                # Composite the a/b NONE pair into one mixed jpeg.
                imageid_a, imageid_b = mixed_aux_images[image_id]
                jpeg_a = image_loader.GetImage(imageid_a)
                jpeg_b = image_loader.GetImage(imageid_b)
                if jpeg_a is None or jpeg_b is None:
                    LOG(INFO, 'skipping missing jpeg')
                    continue
                jpeg = CreateMixedJpeg(jpeg_a, jpeg_b)
            else:
                # Mix with the next landscape-orientation distractor image.
                distractor = None
                while True:
                    distractor = distractor_image_loader.GetNextImage()
                    if distractor.width > distractor.height:
                        break
                CHECK(distractor)
                jpeg = CreateMixedJpeg(image_loader.GetImage(image_id),
                                       distractor)
            CHECK(image_id not in used_image_ids)
            CHECK(jpeg)
            image_writer.Add(iwutil.Uint64ToKey(image_id),
                             jpeg.SerializeToString())
            used_image_ids.add(image_id)
            progress.update(i)
        image_writer.Close()
    except Exception as e:
        # BUG FIX: was a bare 'except: pass' that silently discarded every
        # failure (including KeyboardInterrupt); log and re-raise instead.
        LOG(INFO, 'failed writing image pert: %s' % e)
        raise
    return