def stats_of_agreement(lis_1, lis_2):
    """ Show some stats about the Cohen Kappa agreement.

    It considers the intersection of objects between annotators:
    counts objects seen only by annotator 1, only by annotator 2,
    and by both.
    """
    only_1, only_2, shared = [], [], []
    with fh.LisFile(lis_1) as flis1, \
         fh.LisFile(lis_2) as flis2:
        if flis1.nb_frames() != flis2.nb_frames():
            logger.error('Files do not contain the same number of frames.')
        else:
            paired = zip(flis1.objects_in_frame(ids=True),
                         flis2.objects_in_frame(ids=True))
            for (idfr, objs1), (_, objs2) in paired:
                # split the frame objects into exclusive and common sets
                rest1, rest2, common = intersection(objs1, objs2)
                only_1.extend(rest1)
                only_2.extend(rest2)
                shared.extend(common)
    print('Only annotator 1:', len(only_1))
    print('Only annotator 2:', len(only_2))
    print('Both annotators :', len(shared))
    print('Total annotation:', len(only_1) + len(only_2) + len(shared))
def agreement_iou(lis_1, lis_2):
    """ Generate the IoU score for each pair of bounding box.

    Output: list containing (id_frame, id_object, iou)
    """
    scores = []
    with fh.LisFile(lis_1) as flis1, \
         fh.LisFile(lis_2) as flis2:
        if flis1.nb_frames() != flis2.nb_frames():
            logger.error('Files do not contain the same number of frames.')
        else:
            pb = pbar.ProgressBar(flis1.nb_frames())
            paired = zip(flis1.objects_in_frame(ids=True, pos=True),
                         flis2.objects_in_frame(ids=True, pos=True))
            for (idfr, objs1), (_, objs2) in paired:
                # get correspondence between bounding boxes
                dpairs = align_objects(objs1, objs2)
                for idobj in dpairs:
                    for box_a, box_b in dpairs[idobj]:
                        # convert (x, y, w, h) into (xmin, ymin, xmax, ymax)
                        rect_a = (box_a[0], box_a[1],
                                  box_a[0] + box_a[2], box_a[1] + box_a[3])
                        rect_b = (box_b[0], box_b[1],
                                  box_b[0] + box_b[2], box_b[1] + box_b[3])
                        iou = intersection_over_union(rect_a, rect_b)
                        # (id_frame, id_object, iou)
                        scores.append((idfr, idobj, round(iou, 2)))
                pb.update()
    return scores
def main(vocfile, mapfile, output=None):
    """ Convert VOC paths to KSCGR paths.

    vocfile: LIS-format file whose last column holds a VOC image path.
    mapfile: map file linking VOC paths to KSCGR paths (key='voc').
    output: destination path; defaults to `<vocfile>_kscgr.txt` next to
        the input file.
    """
    if not output:
        fname, _ = splitext(basename(vocfile))
        output = join(dirname(vocfile), fname + '_kscgr.txt')
    # load mapping
    dmap = fh.MapFile(mapfile).load_dictionary(key='voc')
    logger.info('Loaded mapping for {} paths'.format(len(dmap)))
    flis = fh.LisFile(vocfile)
    pb = pbar.ProgressBar(flis.nb_lines())
    with flis as fin, open(output, 'w') as fout:
        for arr in fin:
            # BUG FIX: `dict.has_key` was removed in Python 3; the `in`
            # operator behaves identically on both Python 2 and 3.
            if arr[-1] not in dmap:
                logger.warning(
                    'Could not map file! Key do not exist. {} [LINE: {}]'.
                    format(arr[-1], flis.nb_line))
            else:
                path = dmap[arr[-1]]
                idfr, _ = splitext(basename(path))
                fout.write('{}\t{}\t{}\t{}\t{}\n'.format(
                    idfr, arr[1], arr[2], arr[3], path))
            pb.update()
    logger.info('File saved at: {}'.format(output))
def convert_bounding_box(file_input, output=None, scale=1.875, x_offset=80):
    """ Rescale bounding boxes of a LIS file back to the original frame size.

    file_input: LIS-format annotation file.
    output: destination path; defaults to `<file_input>_original.txt`.
    scale: multiplicative factor applied to x, y, w and h. The previous
        hard-coded value 1.875 is kept as default (presumably 480/256,
        mapping 256px annotations back to 480px frames — TODO confirm).
    x_offset: horizontal shift added after scaling x; default 80
        (presumably a letterbox margin — TODO confirm).
    """
    if not output:
        fname, _ = splitext(basename(file_input))
        output = join(dirname(file_input), fname + '_original.txt')
    df = fh.LisFile(file_input)
    # NOTE(review): the progress bar was created but never updated in the
    # original; kept as-is to preserve behavior.
    pb = pbar.ProgressBar(df.nb_frames())
    header = False
    with df as fin, open(output, 'w') as fout:
        for idfr, obj, _, idobj, path in df:
            # emit the header once, before the first frame
            if idfr == '0' and not header:
                fout.write(
                    'Frame:\tLabel:\tPoints:\tBounding Box ID:\tFrame path: %s\n'
                    % df.path)
                header = True
            x = x_offset + (df.x * scale)
            y = df.y * scale
            w = df.w * scale
            h = df.h * scale
            fout.write('%s\t%s\t(%d,%d,%d,%d)\t%s\t%s\n' %
                       (idfr, obj, x, y, w, h, idobj, path))
        fout.write('---\nModified on:\t10.1.2019\t16:43')
    logger.info('File saved at: %s' % output)
def cohens_kappa(lis_1, lis_2):
    """ Calculate the Cohen's Kappa agreement """
    labels_1, labels_2 = [], []
    with fh.LisFile(lis_1) as flis1, \
         fh.LisFile(lis_2) as flis2:
        if flis1.nb_frames() != flis2.nb_frames():
            logger.error('Files do not contain the same number of frames.')
        else:
            paired = zip(flis1.objects_in_frame(ids=True),
                         flis2.objects_in_frame(ids=True))
            for (idfr, objs1), (_, objs2) in paired:
                # align both annotation lists so they compare element-wise
                aligned_1, aligned_2 = align_lists(objs1, objs2)
                labels_1.extend(aligned_1)
                labels_2.extend(aligned_2)
    kappa = cohen_kappa_score(labels_1, labels_2)
    print(kappa)
def _loadImages(self):
    """ Extract the content from the file and stores into an array.

    Populates `self.imgdata` with (path, positions) tuples, where each
    entry of `positions` is either (xmin, ymin, xmax, ymax, class_id)
    or [] for an empty annotation slot. Returns `self.imgdata`.
    """
    self.imgdata = []
    with fh.LisFile(self.input) as flis:
        for arr in flis.iterate_frames():
            path = arr[0]
            # Removed unused `name, ext = splitext(basename(path))` —
            # neither variable was referenced afterwards.
            self._check_size(path)
            positions = []
            for vec in arr[1]:
                if vec:
                    obj, x, y, w, h = vec
                    # convert (x, y, w, h) to corner coordinates
                    positions.append((x, y, x + w, y + h, DIC[obj]))
                else:
                    positions.append([])
            self.imgdata.append((path, positions))
    return self.imgdata
def merge_annotation(folder_input, output=None, class_file='classes.cfg'):
    """ Merge every LIS annotation file found in `folder_input` into a
        single tab-separated bounding-box file. """
    if not output:
        output = join(folder_input, 'merged_bboxes.txt')
    # Load classes for objects from dict {0: 'rel0', 1: 'rel1'}
    do = fh.ConfigFile(class_file).load_classes(cnames=True)
    logger.info('Loaded dictionary with {} objects.'.format(len(do)))
    files = fh.FolderHandler(folder_input)
    with open(output, 'w') as fout:
        fout.write('Frame\tLabel\tPoints\tBounding Box ID\tFrame path:\n')
        for path in files:
            logger.info('Processing file: %s' % path)
            with fh.LisFile(path) as flis:
                for arr in flis:
                    check_error(do, flis.obj, flis.nb_line)
                    # 0	table	(0,112,101,142)	29	0.jpg
                    row = '%s\t%s\t%s\t%s\t%s\n' % (
                        arr[0], arr[1], arr[2], arr[3],
                        join(flis.path, arr[4]))
                    fout.write(row)
    logger.info('Saved bounding boxes in file: %s' % output)
def sanitize_for_bounding_boxes(file_objects, file_relations, output=None):
    """ Check whether a file of relations is according with the bouding
        boxes described in the `file_objects` file.

    Valid relations are written to `output` (default:
    `<file_relations>_sanity<ext>`); when no error is found the output
    file is deleted again, so it only survives as an error report base.
    """
    if not output:
        fname, ext = splitext(basename(file_relations))
        output = join(dirname(file_relations), fname+'_sanity'+ext)
    # both inputs must have their frames in sequence before comparing
    verify_sequence_frames(file_objects)
    verify_sequence_frames(file_relations)

    drels = defaultdict(list)
    # Load groups of relations for frame, keyed by image name `<frame>.jpg`
    frls = fh.DecompressedFile(file_relations)
    with frls as frels:
        for arr in frels:
            fr, o1, r, o2 = arr[0], arr[1], arr[2], arr[3]
            pathimg = str(fr)+'.jpg'
            drels[pathimg].append((fr, o1, r, o2))
    logger.info('Loaded relations for {} frames.'.format(len(drels)))

    errors = 0
    filelis = fh.LisFile(file_objects)
    with open(output, 'w') as fout, filelis as flis:
        # NOTE(review): `frls.path` is read after its `with` block closed;
        # assumes fh.DecompressedFile keeps `path` accessible — confirm.
        fout.write('Frame\tSubject\tRelation\tObject\tPath: {}\n'.format(frls.path))
        for pathimg, arr in flis.iterate_frames():
            # first element of each bbox tuple is the object label/id
            objects = [bbox[0] for bbox in arr]
            relations = drels[pathimg]
            for fr, o1, r, o2 in relations:
                # presumably check_error returns truthy when the object
                # exists among the frame's bboxes — verify against its def
                if check_error(objects, o1, pathimg) and check_error(objects, o2, pathimg):
                    fout.write('{}\t{}\t{}\t{}\n'.format(fr, o1, r, o2))
                else:
                    errors += 1
    if errors:
        logger.info('Finished WITH {} errors!'.format(errors))
    else:
        # no problems found: drop the (fully valid) sanitized copy
        os.remove(output)
        logger.info('Finished without errors!')
def create_video_from_file(inputfile, outputfile, file_classes='classes.cfg'):
    """ Render the annotated frames of a LIS file into a DIVX/AVI video.

    inputfile: LIS-format annotation file referencing frame image paths.
    outputfile: destination video; when falsy, defaults to
        `<inputfile>.avi` in the same folder.
    file_classes: config file mapping class names to ids.
    """
    do = fh.ConfigFile(file_classes).load_classes(cnames=True)
    img_array = []
    # BUG FIX: the original wrote the video to `fnameout`, which was only
    # defined when `outputfile` was falsy — passing an explicit output
    # raised NameError. Now the given name is used directly.
    if not outputfile:
        fname, _ = splitext(basename(inputfile))
        outputfile = join(dirname(inputfile), fname + '.avi')
    fann = fh.LisFile(inputfile)
    pb = pbar.ProgressBar(fann.nb_frames())
    size = None
    with fann as flis:
        for fname, objs in flis.iterate_frames():
            if exists(fname):
                img = cv2.imread(fname)
                if size is None:
                    # take the video dimensions from the first readable frame
                    height, width, _ = img.shape
                    size = (width, height)
                for label, xmin, ymin, w, h in objs:
                    # BUG FIX: dropped `colors.cnames.keys()[do[label]]` —
                    # the result was never used and indexing dict keys
                    # fails on Python 3.
                    xmax = xmin + w
                    ymax = ymin + h
                    cv2.rectangle(img, (xmin, ymin), (xmax, ymax),
                                  BBOX_COLOR, 1)
                    cv2.putText(img, label, (xmin - 10, ymin - 10),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, BBOX_COLOR, 1)
                img_array.append(img)
            else:
                logger.info('{} not a file.'.format(fname))
            pb.update()
    # BUG FIX: guard against an empty input, where `size` stayed unset
    if size is None:
        logger.error('No frames found; video not created.')
        return
    out = cv2.VideoWriter(outputfile, cv2.VideoWriter_fourcc(*'DIVX'),
                          30, size)
    for img in img_array:
        out.write(img)
    out.release()
def main(inputfile, size_in, size_out):
    """ Convert annotation from size_in to size_out images. """
    fname, _ = splitext(basename(inputfile))
    foutname = fname + '_' + str(size_out) + '.txt'
    foutput = join(dirname(inputfile), foutname)
    flis = fh.LisFile(inputfile)
    pb = pbar.ProgressBar(flis.nb_lines())
    with flis as flis, open(foutput, 'w') as fout:
        for arr in flis:
            # rescale every coordinate proportionally to the new size
            x_out, y_out, w_out, h_out = [
                int((float(v) / size_in) * size_out)
                for v in (flis.x, flis.y, flis.w, flis.h)]
            # e.g. 86 \t person \t (0,51,49,64) \t 0 \t .../rgb256/86.jpg
            fout.write('%d\t%s\t(%d,%d,%d,%d)\t%d\t%s\n' %
                       (flis.idfr, flis.obj, x_out, y_out, w_out, h_out,
                        flis.idobj, flis.fname))
            pb.update()
    logger.info('Converted %d lines' % flis.nb_lines())
    logger.info('Saved output file as: %s' % foutput)
def main(fileobj, filerel, output=None, class_file='classes.cfg',
         rels_file='relations.cfg', map_paths='map_paths.txt'):
    """ Create a `train.pkl` or `test.pkl` file containing the relationship
        between objects.

    TODO: Implement relations for two objects of the same class in the
    same image
    """
    if not output:
        output = join(dirname(fileobj), 'train.pkl')
    fdicobj = join(dirname(output), 'obj.txt')
    fdicrel = join(dirname(output), 'rel.txt')

    if map_paths:
        fmap = fh.MapFile(map_paths)
        dmap = fmap.load_dictionary(key='kscgr')
        logger.info('Loaded map file containing {} entries.'.format(len(dmap)))
        home = fmap.path

    # Load classes for objects from dict {0: 'rel0', 1: 'rel1'}
    # DO NOT LOAD `__background__`. Thus, id_person=0
    do = fh.ConfigFile(class_file, background=False).load_classes(cnames=True)
    logger.info('Loaded dictionary with {} objects.'.format(len(do)))
    dr = fh.ConfigFile(rels_file).load_classes(cnames=True)
    logger.info('Loaded dictionary with {} relations.'.format(len(dr)))

    dic_rels = defaultdict(list)  # relations for each image
    logger.info('Loading information from file: {}'.format(filerel))
    filerls = fh.DecompressedFile(filerel)
    pb = pbar.ProgressBar(filerls.nb_lines())
    with filerls as frels:
        for fr, o1, r, o2, path in frels:
            idsub = do[o1]
            idrel = dr[r]
            idobj = do[o2]
            pathimg = join(path, str(fr) + '.jpg')
            if map_paths:
                pathimg = dmap[join(home, pathimg)]
            dic_rels[pathimg].append((idsub, idrel, idobj))
            pb.update()
    # BUG FIX: bare `print` is a Python-2-only statement; emit the blank
    # line explicitly so the code also runs on Python 3.
    print('')

    info = []
    # Load objects
    logger.info('Loading information from file: {}'.format(fileobj))
    flis = fh.LisFile(fileobj)
    # BUG FIX: the progress bar was sized from the *relations* file while
    # the loop iterates the *objects* file (cf. create_gt_pickle).
    nb_frames = flis.nb_frames()
    pb = pbar.ProgressBar(nb_frames)
    logger.info('Processing {} frames.'.format(nb_frames))
    with flis as fin:
        for imgname, arr in flis.iterate_frames():
            filepath = dmap[join(home, imgname)]
            classes, boxes = [], []
            vsub, vobj, vrel = [], [], []
            dor = {}  # class id -> index of its bbox within this frame
            for i, (obj, x, y, w, h) in enumerate(arr):
                iobj = do[obj]
                dor[iobj] = i
                classes.append(iobj)
                # BUG FIX: ymax is y + h; the original wrote y + w, which
                # contradicts its own [xmin,ymin,xmax,ymax] comment.
                boxes.append([x, y, x + w, y + h])  # [xmin,ymin,xmax,ymax]
            for idsub, idrel, idobj in dic_rels[filepath]:
                vsub.append(dor[idsub])
                vobj.append(dor[idobj])
                vrel.append([idrel])
            info.append({
                'img_path': filepath,
                'classes': np.array(classes),
                'boxes': np.array(boxes),
                'ix1': np.array(vsub),
                'ix2': np.array(vobj),
                'rel_classes': vrel
            })
            pb.update()

    logger.info('Saving pickle file...')
    # close the file even if pickling fails
    with open(output, 'wb') as fout:
        cPickle.dump(info, fout)
    logger.info('Saved content in file: {}'.format(output))
    save_dictionary(fdicobj, do)
    save_dictionary(fdicrel, dr)
def create_gt_pickle(fileobj, filerel, output=None, class_file='classes.cfg',
                     rels_file='relations.cfg'):
    """ Create a `gt.pkl` file containing the relationship between subjects
        and objects.

    TODO: Implement relations for two objects of the same class in the
    same image
    """
    if not output:
        output = join(dirname(fileobj), 'gt.pkl')
    dgt = {'sub_bboxes': [], 'obj_bboxes': [], 'tuple_label': []}

    # Load classes for objects from dict {0: 'rel0', 1: 'rel1'}
    # DO NOT LOAD `__background__`. Thus, id_person=0
    do = fh.ConfigFile(class_file, background=False).load_classes(cnames=True)
    logger.info('Loaded dictionary with {} objects.'.format(len(do)))
    dr = fh.ConfigFile(rels_file).load_classes(cnames=True)
    logger.info('Loaded dictionary with {} relations.'.format(len(dr)))

    # (removed a commented-out block that duplicated load_relations)
    dic_rels = load_relations(filerel, do, dr)
    # BUG FIX: bare `print` is Python-2-only syntax; print the blank line
    # explicitly so this also runs on Python 3.
    print('')

    # Load objects
    logger.info('Loading information from file: {}'.format(fileobj))
    flis = fh.LisFile(fileobj)
    nb_frames = flis.nb_frames()
    pb = pbar.ProgressBar(nb_frames)
    logger.info('Processing {} frames.'.format(nb_frames))
    with flis as fin:
        for pathimg, arr in flis.iterate_frames():
            sub_boxes, obj_boxes, vrels = [], [], []
            for idsub, idrel, idobj in dic_rels[pathimg]:
                relation = np.array([idsub, idrel, idobj])
                # NOTE(review): if a frame lacks the subject or object
                # class, bbox_sub/bbox_obj is unbound (or stale from a
                # previous relation) — confirm annotations always contain
                # both endpoints before relying on this.
                for obj, x, y, w, h in arr:
                    iobj = do[obj]
                    if iobj == idsub:
                        bbox_sub = np.array([x, y, x + w, y + h])
                    if iobj == idobj:
                        bbox_obj = np.array([x, y, x + w, y + h])
                sub_boxes = add_element(sub_boxes, bbox_sub)
                obj_boxes = add_element(obj_boxes, bbox_obj)
                vrels = add_element(vrels, relation)
            dgt['sub_bboxes'].append(sub_boxes)
            dgt['obj_bboxes'].append(obj_boxes)
            dgt['tuple_label'].append(vrels)
            pb.update()
    dgt['sub_bboxes'] = np.array(dgt['sub_bboxes'])
    dgt['obj_bboxes'] = np.array(dgt['obj_bboxes'])
    dgt['tuple_label'] = np.array(dgt['tuple_label'])

    logger.info('Saving pickle file...')
    # close the file even if pickling fails
    with open(output, 'wb') as fout:
        cPickle.dump(dgt, fout)
    logger.info('Saved content in file: {}'.format(output))