Example #1
def saveEvalResults(generator, cfg, obj_mapping, hoi_mapping, evalData=None):
    
    my_output_path = cfg.results_path + 'hoi' + cfg.my_results_dir + '/res/' + generator.data_type + generator.approach + '/'
    
    path = cfg.part_results_path + cfg.dataset + "/hoi" + cfg.my_results_dir
    mode = generator.data_type
    
    if not os.path.exists(path):
        path = path[:-1]
    path += '/'
    nb_empty = 0
    if evalData is None:
        evalData = []
        for batchidx, (imageID, imageMeta) in enumerate(generator.imagesMeta.items()):
            if (batchidx+1) % (max(1,generator.nb_batches // 100)) == 0 or batchidx==1 or (batchidx+1) == generator.nb_batches:
                utils.update_progress_new(batchidx+1, generator.nb_batches, imageID)
            
            if os.path.exists(my_output_path + imageID + '.pkl'):
                data = utils.load_obj(my_output_path + imageID)
                if data is not None and len(data) > 0:
                    evalData.extend(data)
            else:
                nb_empty += 1
                
    evalData = cp.copy(evalData)
    mAP, AP, _ = metrics.computeHOImAP(evalData, generator.imagesMeta, obj_mapping, hoi_mapping, cfg)
    saveMeta = {'mAP': mAP, 'zAP': AP.tolist(), 'nb_empties': nb_empty}
    utils.save_dict(saveMeta, path+mode+'_mAP')
    print('mAP', mode, mAP)
    print('empties', nb_empty)
Example #2
def computeRPNARHelper(predMeta, GTMeta):
    import filters_helper as helper

    nb_gt_samples = len(GTMeta)

    Ps = [100, 300]  # proposal-count cutoffs: evaluate recall within the top-100 and top-300 proposals

    IoU = np.zeros((2, nb_gt_samples))

    for idx in range(nb_gt_samples):

        if idx % 100 == 0:
            utils.update_progress_new(idx, nb_gt_samples, '')
        gt = GTMeta[idx]
        bbox = gt['bbox']
        imageID = gt['image_id']

        pred_bboxes = np.array(predMeta[imageID])

        overlaps = helper._computeIoUs(bbox, pred_bboxes)

        ol_idxs = np.argsort(overlaps)[::-1]
        max_iou = np.max(overlaps)

        done_Ps = [False for _ in range(len(Ps))]
        for ol_idx in ol_idxs:
            overlap = overlaps[ol_idx]
            pred_bbox = pred_bboxes[ol_idx, :]
            top = pred_bbox[4]
            for P_idx, P in enumerate(Ps):
                if done_Ps[P_idx] or top > P:
                    continue
                # first (best-overlapping) proposal that falls within the top-P set
                done_Ps[P_idx] = True
                if overlap >= 0.5:
                    IoU[P_idx, idx] = max_iou  # true positive

            if all(done_Ps):
                break

    R = [np.sum(x >= 0.5) / nb_gt_samples for x in IoU]

    #    ious = [x/100 for x in range(50,100,5)]
    #    recalls = np.zeros((len(ious)))
    #    for idx, iou in enumerate(ious):
    #        r = np.sum(IoU>=iou) / nb_gt_samples
    #        recalls[idx] = r
    #
    #    AR = np.mean(recalls)

    # closed-form AR: recall averaged over IoU thresholds in [0.5, 1]
    overlaps = [[ol - 0.5 for ol in x if ol > 0] for x in IoU]
    AR = [2 * np.sum(x) / nb_gt_samples for x in overlaps]

    return AR, R, IoU
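
# Note (not part of the original source): helper._computeIoUs is not shown in
# these examples. A minimal sketch of what it is assumed to do -- the IoU of one
# ground-truth box against an (N, 4+) array of boxes in (x1, y1, x2, y2) order:
def _computeIoUs_sketch(bbox, bboxes):
    import numpy as np
    x1 = np.maximum(bbox[0], bboxes[:, 0])
    y1 = np.maximum(bbox[1], bboxes[:, 1])
    x2 = np.minimum(bbox[2], bboxes[:, 2])
    y2 = np.minimum(bbox[3], bboxes[:, 3])
    inter = np.maximum(0, x2 - x1) * np.maximum(0, y2 - y1)
    gt_area = (bbox[2] - bbox[0]) * (bbox[3] - bbox[1])
    areas = (bboxes[:, 2] - bboxes[:, 0]) * (bboxes[:, 3] - bboxes[:, 1])
    return inter / (gt_area + areas - inter)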
Example #3
def loadEvalData(generator, my_output_path):
    evalData = []
    for batchidx, (imageID, imageMeta) in enumerate(generator.imagesMeta.items()):
        if (batchidx + 1) % (max(1, generator.nb_batches // 100)) == 0 \
                or batchidx == 1 or (batchidx + 1) == generator.nb_batches:
            utils.update_progress_new(batchidx + 1, generator.nb_batches, imageID)

        data = utils.load_obj(my_output_path + imageID)
        if data is not None and len(data) > 0:
            evalData.extend(data)

    return evalData
Example #4
def saveEvalData(generator, Stages, cfg, obj_mapping):

    cfg.my_output_path = cfg.results_path + 'det' + cfg.my_results_dir + '/res/' + generator.data_type + '/'

    if not os.path.exists(cfg.my_output_path):
        os.makedirs(cfg.my_output_path)
    save_path = cfg.my_output_path
    print('   save_path:', save_path)

    evalData = []

    imagesIDs = list(generator.imagesMeta.keys())
    r.shuffle(imagesIDs)
    for batchidx, imageID in enumerate(imagesIDs):
        if (batchidx + 1) % (max(1, generator.nb_batches // 100)) == 0 \
                or batchidx == 1 or (batchidx + 1) == generator.nb_batches:
            utils.update_progress_new(batchidx + 1, generator.nb_batches, imageID)

        path = save_path + imageID + '.pkl'
        if os.path.exists(path):
            continue
        imageMeta = generator.imagesMeta[imageID]
        imageMeta['id'] = imageID

        imageInputs = generator._getImageInputs(imageID)
        X, imageDims = filters_rpn.prepareInputs(imageMeta,
                                                 generator.images_path, cfg)
        Y_tmp = filters_detection.loadData(imageInputs, cfg)
        proposals, target_labels, target_deltas = Y_tmp
        #STAGE 1
        #        proposals = Stages.stageone([img], y, imageMeta, imageDims)

        #STAGE 2
        bboxes = Stages.stagetwo([X, proposals], imageMeta, imageDims)
        if bboxes is None:
            utils.save_obj(None, save_path + str(imageID))
            continue

        #CONVERT
        evalData = filters_detection.convertResults(bboxes[0], imageMeta,
                                                    obj_mapping,
                                                    imageDims['scale'],
                                                    cfg.rpn_stride)
        utils.save_obj(evalData, save_path + str(imageID))

    return evalData
Example #5
def saveInputData(generator, Stages, cfg, do_train_eval=False):  
    rdir = 'evalnew' if do_train_eval else 'new'
    mode = 'test' if do_train_eval else generator.data_type
    cfg.my_output_path = cfg.results_path + 'det' + cfg.my_results_dir + '/output/' + generator.data_type + rdir + '/'
    
    if not os.path.exists(cfg.my_output_path):
        os.makedirs(cfg.my_output_path)
    if not os.path.exists(cfg.my_output_path):
        raise Exception('Output directory does not exist! %s' % cfg.my_output_path)
    save_path = cfg.my_output_path
    print('   save_path:', save_path)
    print('   mode:', mode)

    genIterator = generator.begin()
    inputMeta = None
    bboxes = None
    
    for batchidx in range(generator.nb_batches):
        X, y, imageMeta, imageDims, times = next(genIterator)
        imageID = imageMeta['imageName'].split('.')[0]
#        
#        imageID = 'HICO_train2015_00019135'
#        imageMeta = generator.imagesMeta[imageID]
#        X, y, imageDims = Stages.stagezero(imageMeta, generator.data_type)
        if (batchidx+1) % (max(1,generator.nb_batches // 100)) == 0 or batchidx==0 or (batchidx+1) == generator.nb_batches:
            utils.update_progress_new(batchidx, generator.nb_batches, imageID)
        
        
        path = save_path + imageID + '.pkl'
        if os.path.exists(path):
            continue
        
        #STAGE 1
        proposals = Stages.stageone([X], y, imageMeta, imageDims)
        
        #STAGE 2
        bboxes = Stages.stagetwo([proposals], imageMeta, imageDims)
        
        #STAGE 3
        all_hbboxes, all_obboxes, all_target_labels, val_map = Stages.stagethree_targets(bboxes, imageMeta, imageDims)
        
        #CONVERT
        inputMeta = filters_hoi.convertData([all_hbboxes, all_obboxes, all_target_labels, val_map], cfg, mode=mode)
        
        utils.save_obj(inputMeta, save_path + imageID)
    return inputMeta, imageID, bboxes
Example #6
def computeRPNARHelperOld(batch, GTMeta, nb_gt_samples):
    import filters_helper as helper

    props = np.array([x['score'] for x in batch])

    sorted_idxs = props.argsort()[::-1]
    nb_rois = len(sorted_idxs)

    IoU = np.zeros((nb_rois))

    for i, idx in enumerate(sorted_idxs):

        if i % 1000 == 0:
            utils.update_progress_new(i, nb_rois, '')
        roi = batch[idx]
        bbox = roi['bbox']
        imageID = roi['image_id']

        GTs = GTMeta[imageID]
        gt_bboxes = GTs['gt_bboxes']

        overlap = helper._computeIoUs(bbox, gt_bboxes)

        max_iou = np.max(overlap)
        best_idx = np.argmax(overlap)

        if max_iou >= 0.5:
            if gt_bboxes[best_idx, 4] == 0:
                IoU[idx] = max_iou  # true positive
                GTMeta[imageID]['gt_bboxes'][best_idx, 4] = 1

    R5 = np.sum(IoU >= 0.5) / nb_gt_samples

    ious = [x / 100 for x in range(50, 100, 5)]
    recalls = np.zeros((len(ious)))
    for idx, iou in enumerate(ious):
        r = np.sum(IoU >= iou) / nb_gt_samples
        recalls[idx] = r

    AR = np.mean(recalls)

    # closed-form AR over a continuous IoU threshold range [0.5, 1]
    # (overrides the threshold-averaged AR computed above)
    overlaps = [x - 0.5 for x in IoU if x > 0]
    AR = 2 * np.sum(overlaps) / nb_gt_samples

    return AR, R5, IoU
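
# Illustration (not in the original source): the closed-form AR above equals
# recall averaged over a continuous IoU threshold in [0.5, 1]; for one matched
# box with IoU o >= 0.5 the averaged recall is (o - 0.5) / 0.5 = 2 * (o - 0.5).
import numpy as np
ious_demo = np.array([0.9, 0.6, 0.0, 0.55])   # toy IoUs; 0 means an unmatched GT box
n = len(ious_demo)
ar_closed = 2 * np.sum([o - 0.5 for o in ious_demo if o > 0]) / n
thresholds = np.linspace(0.5, 1.0, 1001)
ar_avg = np.mean([np.sum(ious_demo >= t) / n for t in thresholds])
print(ar_closed, ar_avg)   # both approximately 0.275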
Example #7
def saveEvalData(generator, Stages, cfg):
    genIterator = generator.begin()
    evalData = []

    for i in range(generator.nb_batches):
        [img, proposals], y, imageMeta, imageDims, times = next(genIterator)
        imageID = imageMeta['imageID']
        utils.update_progress_new(i + 1, generator.nb_batches, imageID)

        #STAGE 1
        #        proposals = Stages.stageone(X, y, imageMeta, imageDims)
        proposals = proposals[0, :, :]

        #CONVERT
        evalData += filters_rpn.convertResults(proposals, imageMeta,
                                               imageDims['scale'],
                                               cfg.rpn_stride)

    return evalData
Example #8
def saveInputData(generator, Stages, cfg):
    cfg.my_output_path = cfg.results_path + 'rpn' + cfg.my_results_dir + '/output/'
    if not os.path.exists(cfg.my_output_path):
        raise Exception('Output directory does not exist! %s' %
                        cfg.my_output_path)
    if not os.path.exists(cfg.my_output_path + generator.data_type + '/'):
        os.makedirs(cfg.my_output_path + generator.data_type + '/')
    save_path = cfg.my_output_path + generator.data_type + '/'
    print('   save_path:', save_path)

    genIterator = generator.begin()
    detMeta = {}

    for batchidx in range(generator.nb_batches):

        img, y, imageMeta, imageDims, times = next(genIterator)
        imageID = str(imageMeta['imageID'])
        if (batchidx + 1) % (max(1, generator.nb_batches // 100)) == 0 \
                or batchidx == 1 or (batchidx + 1) == generator.nb_batches:
            utils.update_progress_new(batchidx + 1, generator.nb_batches, imageID)

        path = save_path + imageID + '.pkl'
        if os.path.exists(path):
            continue

        #STAGE 1
        proposals = Stages.stageone([img], y, imageMeta, imageDims)

        #STAGE 2
        proposals, target_labels, target_deltas = Stages.stagetwo_targets(
            proposals, imageMeta, imageDims)

        #CONVERT
        if proposals is None:
            utils.save_obj(None, save_path + imageID)
            continue

        detMeta = filters_detection.convertData(
            [proposals, target_labels, target_deltas], cfg)

        utils.save_obj(detMeta, save_path + imageID)
Example #9
def saveEvalData(generator, Stages, cfg, hoi_mapping):
    
    cfg.my_output_path = cfg.results_path + 'hoi' + cfg.my_results_dir + '/res/' + generator.data_type + generator.approach + '/'
    
    if not os.path.exists(cfg.my_output_path):
        os.makedirs(cfg.my_output_path)
    save_path = cfg.my_output_path
    print('   save_path:', save_path)
    
    evalData = []
    imageMeta = None
    imagesIDs = list(generator.imagesMeta.keys())
    r.shuffle(imagesIDs)
    for batchidx, imageID in enumerate(imagesIDs):
        if (batchidx+1) % (max(100,generator.nb_batches // 100)) == 0 or batchidx==1 or (batchidx+1) == generator.nb_batches:
            utils.update_progress_new(batchidx+1, generator.nb_batches, imageID)
                
        path = save_path + imageID + '.pkl'
        if os.path.exists(path):
            continue
        imageMeta = generator.imagesMeta[imageID]
        imageMeta['id'] = imageID
#        [X, all_hbboxes, all_obboxes, all_val_map], all_target_labels, imageMeta, imageDims, _ = next(genIterator)
        imageInputs = generator._getImageInputs(imageID)
        X, imageDims = filters_rpn.prepareInputs(imageMeta, generator.images_path, cfg)
        Y_tmp = filters_hoi.loadData(imageInputs, imageDims, cfg)
            
        if Y_tmp is None:
            utils.save_obj(None, save_path + imageID)
            continue
        
        all_hbboxes, all_obboxes, all_target_labels, all_val_map = Y_tmp

        #STAGE 3
        pred_hbboxes, pred_obboxes, pred_props = Stages.stagethree([X,all_hbboxes,all_obboxes], imageMeta, imageDims, obj_mapping=None)
        if pred_hbboxes is None:
            utils.save_obj(None, save_path + imageID)
            continue
          
        #CONVERT
        evalData = filters_hoi.convertResults(pred_hbboxes, pred_obboxes, pred_props, imageMeta, imageDims['scale'], cfg, hoi_mapping)
        utils.save_obj(evalData, save_path + imageID)
    return evalData, imageMeta
Example #10
def saveInputData(imagesMeta, data_type, cfg):
    load_path = cfg.data_path + 'images/' + data_type + '/'
    save_path = cfg.my_output_path + data_type + 'newest/'

    if not os.path.exists(save_path):
        os.makedirs(save_path)
    print('save_path', save_path)

    nb_images = len(imagesMeta)

    for batchidx, (imageID, imageMeta) in enumerate(imagesMeta.items()):
        imageID = imageMeta['imageName'].split('.')[0]
        utils.update_progress_new(batchidx + 1, nb_images, imageID)

        path = save_path + imageID + '.pkl'
        if os.path.exists(path):
            continue

        img, imageDims = filters_rpn.prepareInputs(imageMeta, load_path, cfg)
        [Y1, Y2, M] = filters_rpn.createTargets(imageMeta, imageDims, cfg)

        rpnMeta = filters_rpn.convertData([Y1, Y2, M], cfg)
        utils.save_obj(rpnMeta, save_path + imageID)
Example #11
def saveEvalData(generator, Stages, cfg, hoi_mapping):
    cfg.my_output_path = cfg.results_path + 'hoi' + cfg.my_results_dir + '/res/' + generator.data_type + 'res/'
    
    if not os.path.exists(cfg.my_output_path):
        os.makedirs(cfg.my_output_path)
    save_path = cfg.my_output_path
    print('   save_path:', save_path)
    
    evalData = []
    imageMeta = None
    imagesIDs = list(generator.imagesMeta.keys())
    r.shuffle(imagesIDs)
    for batchidx, imageID in enumerate(imagesIDs):        
        if (batchidx+1) % (max(100,generator.nb_batches // 100)) == 0 or batchidx==1 or (batchidx+1) == generator.nb_batches:
            utils.update_progress_new(batchidx+1, generator.nb_batches, imageID)
                
        path = save_path + imageID + '.pkl'
        if os.path.exists(path):
            continue
        
        imageMeta = generator.imagesMeta[imageID]
        imageMeta['id'] = imageID
        X, y, imageDims = Stages.stagezero(imageMeta, generator.data_type)

#        gt_label = imageMeta['label']
#        intval_beg = gt_label // 2 * 2
        
        proposals = Stages.stageone([X], y, imageMeta, imageDims)
        bboxes = Stages.stagetwo([proposals], imageMeta, imageDims)
        pred_hbboxes, pred_obboxes, pred_props = Stages.stagethree([bboxes], imageMeta, imageDims)
          
        #CONVERT
        evalData = filters_hoi.convertResults(pred_hbboxes, pred_obboxes, pred_props, imageMeta, imageDims['scale'], cfg, hoi_mapping)
        utils.save_obj(evalData, save_path + imageID)
#        break
    return evalData, imageMeta
Example #12
    print(cfg.my_input_path+'testnew/')

    sys.stdout.flush()

#if True:
    nb_total = np.zeros(cfg.nb_hoi_classes)
    nb_tp = np.zeros(cfg.nb_hoi_classes)
    nb_empty = 0
    nb_total_preds = 0
    for j, (imageID, imageMeta) in enumerate(generator.imagesMeta.items()):
        
#        imageID = 'HICO_train2015_00028567'
#        imageMeta = generator.imagesMeta[imageID]
        
        if (j+1) % 100 == 0:
            utils.update_progress_new((j+1), generator.nb_images, imageID)
        
        img = cv.imread(generator.images_path + imageMeta['imageName'])
        X, imageDims = filters_rpn.prepareInputs(imageMeta, generator.images_path, cfg)
        objs = imageMeta['objects']
        gt_rels = imageMeta['rels']
        gtbboxes = helper._transformGTBBox(objs, obj_mapping, None, scale=imageDims['scale'], rpn_stride=cfg.rpn_stride, dosplit=False)
        checks = np.zeros(len(gt_rels))
        
        if np.max(gtbboxes[:,2]) > 2+imageDims['output_shape'][1] or np.max(gtbboxes[:,3]) > 2+imageDims['output_shape'][0]:
            print('bad bbs', imageID, np.max(gtbboxes[:,2]), np.max(gtbboxes[:,3]), imageDims['output_shape'])
        
        imageInputs = utils.load_obj(cfg.my_input_path + 'testnew/' + imageID)
        
        if imageInputs is None:
            idxs = []
Example #13
generator = genVal
genIterator = generator.begin()

nb_iterations = 1
all_times = np.zeros((nb_iterations, 5))

for i in range(nb_iterations):
    X, y, imageMeta, imageDims, times = next(genIterator)

    #    imageID = 'Play_Saxophone_007'
    #    imageMeta = generator.imagesMeta[imageID]
    #    X, y, imageDims = Stages.stagezero(imageMeta, generator.data_type)
    imageID = imageMeta['imageName'].split('.')[0]

    if (i + 1) % 100 == 0:
        utils.update_progress_new((i + 1), nb_iterations, imageID)
    print('imageID', imageID)
    print('Stage one...')
    proposals, times = Stages.stageone([X], y, imageMeta, imageDims)
    all_times[i, 0:2] = times
    print('Stage two...')
    bboxes, times = Stages.stagetwo([proposals], imageMeta, imageDims)
    all_times[i, 2:4] = times
    print('Stage three...')
    pred_hbboxes, pred_obboxes, pred_props, times = Stages.stagethree(
        [bboxes], imageMeta, imageDims, obj_mapping)
    all_times[i, 4:5] = times

    #    continue

    gt_obj_label = obj_mapping[hoi_mapping[imageMeta['label']]['obj']]
Example #14
#trainer.compileModel(cfg)


def unnormCoords(box, shape):
    # box = (ymin, xmin, ymax, xmax) normalized; shape = (H, W, ...); returns [x, y, w, h] in pixels
    xmin = box[1] * shape[1]
    xmax = box[3] * shape[1]
    ymin = box[0] * shape[0]
    ymax = box[2] * shape[0]
    return [xmin, ymin, xmax - xmin, ymax - ymin]
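
# Hypothetical usage of unnormCoords (illustrative values, not from the source):
_demo_box = [0.25, 0.10, 0.75, 0.60]              # (ymin, xmin, ymax, xmax), normalized
print(unnormCoords(_demo_box, (480, 640, 3)))     # -> [64.0, 120.0, 320.0, 240.0]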

idx = 0
j = 0
for i in range(genTest.nb_batches):
    X, y = next(trainIterator)
    break
    j += y.shape[1]
#    utils.update_progress(i / genTest.nb_samples)
    utils.update_progress_new(j, genTest.nb_samples, [y.shape[1],i,0,0], '')
#    continue
#    print('t',X[0].shape, X[1].shape, X[2].shape, y.shape)
    print(X[1].shape)
    
    image = X[0][idx]
    prsBB = X[1][idx][0]
#    objBB = X[2][idx][0]
    y     = y[idx]
    image -= np.min(image)
    image /= 255
    image = image[:,:,(2,1,0)]
#    image = np.fliplr(image)
    prsBB = unnormCoords(prsBB[1:], image.shape)
#    prsBB[0] = image.shape[1] - prsBB[0] - prsBB[2]
#    objBB = unnormCoords(objBB[1:], image.shape)
Example #15
##STAGE 3
#Stages = stages.AllStages(cfg, None, obj_mapping, hoi_mapping, mode='test')
#hbboxes, obboxes, target_labels, val_map = Stages.stagethree_targets(bboxes, imageMeta, imageDims)
#patterns = filters_hoi.createInteractionPatterns(hbboxes, obboxes, cfg)
#CONVERT
#inputMeta = filters_hoi.convertData([hbboxes, obboxes, target_labels, val_map], cfg, mode=genTest.data_type)
#
#utils.save_obj(inputMeta, cfg.part_data_path + imageID)

iterator = genTrain
genItr = iterator.begin()
for batchidx in range(1):
    [img, all_hbboxes, all_obboxes,
     patterns], target_labels, imageMeta, imageDims, _ = next(genItr)
    imageID = imageMeta['imageName']
    utils.update_progress_new(batchidx + 1, iterator.nb_batches, imageID)

    #    [img, hbboxes, obboxes, patterns], target_labels, imageMeta, imageDims, _ = next(genItr)

    X, _ = filters_rpn.prepareInputs(imageMeta, iterator.images_path, cfg)
    imageID = imageMeta['imageName']

#    continue
#    draw.drawPositiveCropHoI(None, None, hcrops, ocrops, patterns, target_labels, imageMeta, imageDims, cfg, obj_mapping)

#    import draw
import draw

img = np.copy(X[0])
img += cfg.PIXEL_MEANS
img = img.astype(np.uint8)
Example #16
if True:
    # Load data
    print('Loading data...')
    data = extract_data.object_data(False)
    cfg = data.cfg

dataset = 'val'

if dataset == 'val':
    print('Validation data')
    imagesMeta = data.valGTMeta
elif dataset == 'test':
    print('Test data')
    imagesMeta = data.testGTMeta
else:
    print('Train data')
    imagesMeta = data.trainGTMeta

nb_images = len(imagesMeta)

from_anchor_path = cfg.part_data_path + cfg.dataset + '/anchors/' + dataset + '/'
to_anchor_path = cfg.data_path + 'anchors/' + dataset + '/'

if not os.path.exists(to_anchor_path):
    os.makedirs(to_anchor_path)

for idx, (imageID, imageMeta) in enumerate(imagesMeta.items()):
    imageName = imageMeta['imageName'].split('.')[0] + '.pkl'
    copyfile(from_anchor_path + imageName, to_anchor_path + imageName)
    utils.update_progress_new(idx + 1, nb_images, imageMeta['imageName'])
Example #17
    bboxes2 = np.copy(bboxes1)
    bboxes2 = filters_detection.prepareInputs(bboxes2, imageDims, imageMeta)

import draw
import filters_detection

img = np.copy(X[0])
img += cfg.PIXEL_MEANS
img = img.astype(np.uint8)
bboxes2 = filters_detection.unprepareInputs(bboxes2, imageDims)

draw.drawOverlapAnchors(img, bboxes2[0], imageMeta, imageDims, cfg)
draw.drawGTBoxes(img, imageMeta, imageDims)

if False:
    redux = {}
    imageID = '487566'
    redux[imageID] = genTrain.imagesInputs[imageID]

    i = 0
    goal = 5000

    for imageID, inputMeta in genTrain.imagesInputs.items():
        redux[imageID] = inputMeta
        utils.update_progress_new(i + 1, goal, imageID)

        if i == goal:
            break
        i += 1

    #utils.save_obj(redux, cfg.my_output_path + 'proposals_redux')
Example #18
    #    imageID='339823'
    #    imageName = '000000339823'

    roisMeta = utils.load_obj(rois_path + imageName)
    target_deltas = np.copy(roisMeta['target_deltas']).tolist()

    roisMeta['rois'] = [[int(x * 1000) for x in box]
                        for box in roisMeta['rois']]
    new_target_deltas = []
    for row in roisMeta['target_deltas']:
        coord = []
        for x in row:
            coord.append(int(x * 1000))
        new_target_deltas.append(coord)
    roisMeta['target_deltas'] = new_target_deltas
    inputsMeta[imageID] = roisMeta

    if idx == 0:
        print('first')
        print('old:')
        print(utils.load_obj(rois_path + imageName))
        print('new')
        print(roisMeta)

    utils.save_obj(inputsMeta[imageID], save_path + dataset + 'new/' + imageID)

    utils.update_progress_new(idx + 1, nb_images, imageID)

#utils.save_obj(inputsMeta, save_path + 'proposals_'+dataset)
Example #19
def saveEvalResults(generator, cfg, obj_mapping, hoi_mapping, evalData=None):
    evalDataInput = evalData
    my_output_path = cfg.results_path + 'hoi' + cfg.my_results_dir + '/res/' + generator.data_type + 'res/'
    
    path = cfg.part_results_path + cfg.dataset + "/hoi" + cfg.my_results_dir
    mode = generator.data_type
    
    if not os.path.exists(path):
        path = path[:-1]
    path += '/'
    nb_empty = 0
    nb_gt_samples = np.zeros(cfg.nb_hoi_classes)
    if evalData is None or True:  # 'or True' forces a rebuild so every entry carries its GT label
        evalData = []
        for batchidx, (imageID, imageMeta) in enumerate(generator.imagesMeta.items()):
            if (batchidx+1) % (max(1,generator.nb_batches // 100)) == 0 or batchidx==1 or (batchidx+1) == generator.nb_batches:
                utils.update_progress_new(batchidx+1, generator.nb_batches, imageID)
            
            gt_label = imageMeta['label']
            nb_gt_samples[gt_label] += 1

            data = utils.load_obj(my_output_path + imageID)
            if data is None or len(data) == 0:
                nb_empty += 1
                continue

            # keep only the highest-scoring detection for this image
            best_score = -1
            best_idx = None
            for idx, det in enumerate(data):
                if det['score'] > best_score:
                    best_score = det['score']
                    best_idx = idx

            best_data = [data[best_idx]]
            best_data[0]['gt'] = gt_label
            evalData.extend(best_data)

                
    # use the freshly built evalData (each entry carries its ground-truth label)
    evalData = cp.copy(evalData)
#    return evalData, None
    
    cfm = np.zeros((cfg.nb_hoi_classes, cfg.nb_hoi_classes))
    
    APs = np.zeros(cfg.nb_hoi_classes)
    for label in range(cfg.nb_hoi_classes):
        subset = [x for x in evalData if x['category_id']==label]
        
        if len(subset)==0:
            continue
        
        props = [x['score'] for x in subset]
        
        nb_preds = len(props)
        idxs = np.argsort(props)[::-1]
        
        tps = np.zeros(nb_preds)
        fps = np.zeros(nb_preds)
        
        nb_class_samples = nb_gt_samples[label]
        
        for i in range(nb_preds):
            idx = idxs[i]
            pred = subset[idx]
            
            cfm[pred['gt'], pred['category_id']] += 1
            
            if pred['category_id'] == pred['gt']:
                tps[i] = 1
            else:
                fps[i] = 1
        
        if np.sum(tps)==0:
            continue
        
        tp = np.cumsum(tps)
        fp = np.cumsum(fps)
        
        print(label, tp[-1], nb_class_samples)
#        break
        recall = tp / nb_class_samples
        precision = tp / (fp+tp)
        

        # 11-point interpolated AP: max precision at recall >= r for r in {0.0, 0.1, ..., 1.0}
        Ps = np.zeros((11))
        for rec in range(0, 11):
            idxs = np.where(recall >= rec / 10.0)[0]
            if len(idxs) == 0:
                p = 0.0
            else:
                p = np.max(precision[idxs])
            Ps[rec] = p
                
        AP = np.mean(Ps)
        APs[label] = AP
    
    mAP = np.mean(APs)

    saveMeta = {'mAP': mAP, 'zAP': APs.tolist(), 'nb_empties': nb_empty}
    utils.save_dict(saveMeta, path + mode + '_mAP')
    print('mAP', mode, mAP)
    print('empties', nb_empty)
    return evalData, mAP, cfm
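
# Illustration (not from the original source): the 11-point interpolated AP used
# above -- for each recall level r in {0.0, 0.1, ..., 1.0} take the maximum
# precision at recall >= r and average the 11 values.
import numpy as np

def eleven_point_ap(recall, precision):
    ap = 0.0
    for r in np.linspace(0.0, 1.0, 11):
        mask = recall >= r
        ap += (np.max(precision[mask]) if np.any(mask) else 0.0) / 11.0
    return ap

# toy curve: three ranked predictions, two correct, two ground-truth samples
recall = np.array([0.5, 0.5, 1.0])
precision = np.array([1.0, 0.5, 2.0 / 3.0])
print(eleven_point_ap(recall, precision))   # approximately 0.848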
Example #20

# meta data
data = extract_data.object_data(False)

# config
cfg = data.cfg
obj_mapping = data.class_mapping

# data
genTrain = DataGenerator(imagesMeta = data.trainGTMeta, cfg=cfg, data_type='train', do_meta=True)
#genVal = DataGenerator(imagesMeta = data.valGTMeta, cfg=cfg, data_type='val', do_meta=True)


genItr = genTrain.begin()
for batchidx in range(genTrain.nb_batches):
    [X,rois], y, imageMeta, imageDims, _ = next(genItr)
#    if batchidx+1 % 100 == 0:
    utils.update_progress_new(batchidx+1, genTrain.nb_batches, '')
    
#    import draw
#    import filters_detection
#    
#    img = np.copy(X[0])
#    img += cfg.PIXEL_MEANS
#    img = img.astype(np.uint8)
#    rois = filters_detection.unprepareInputs(rois, imageDims)
#    
#    draw.drawOverlapAnchors(img, rois[0], imageMeta, imageDims, cfg)
#    
#    break
Example #21
    for i, (imageID, predMeta) in enumerate(predsMeta.items()):
        imageMeta = imagesMeta[str(imageID)]
        gt_bboxes = imageMeta['objects']
        gtbboxes = helper._transformGTBBox(gt_bboxes,
                                           obj_mapping,
                                           None,
                                           scale=[1, 1],
                                           rpn_stride=1,
                                           dosplit=False)

        X, imageDims = filters_rpn.prepareInputs(imageMeta, images_path, cfg)

        pred_bboxes = np.ones((len(predMeta), 6))

        utils.update_progress_new(i + 1, len(predsMeta), imageID)

        for idx, pred in enumerate(predMeta):
            prop = pred['score']
            lbl = obj_mapping[inv_coco_mapping[int(pred['category_id'])]]
            bbox = pred['bbox']
            pred_bboxes[idx, :] = np.copy(
                [x * imageDims['scale'][0] / 16
                 for x in bbox] + [prop] + [lbl])

            bbox[2] += bbox[0]
            bbox[3] += bbox[1]

            if gtbboxes.shape[0] > 0:
                ious = helper._computeIoUs(bbox, gtbboxes)