Example 1
                # NOTE(review): this excerpt starts mid-`if`/`else` — the matching
                # `if` branch and the enclosing training loop lie outside this view.
                else:
                    # Extract scalar loss values from the loss tensors for logging.
                    loss_midn = midn_loss.item()
                    loss_oicr = oicr_loss.item()
                    loss_oicr1 = oc1.item()
                    loss_oicr2 = oc2.item()
                    loss_oicr3 = oc3.item()
                    # Count foreground (label != 0) vs. background RoIs in this batch.
                    fg_cnt = torch.sum(rois_label.data.ne(0))
                    bg_cnt = rois_label.data.numel() - fg_cnt
                    record_module = OICR

                # Max coordinate per box column — presumably (x1, y1, x2, y2) at
                # indices 1..4 with a batch/image index at 0; TODO confirm layout.
                # NOTE(review): `a` and `b` are computed but never used below
                # (only `c`, `d` are printed) — candidates for removal.
                a = torch.max(im_rois[:, :, 1]).data
                b = torch.max(im_rois[:, :, 2]).data
                c = torch.max(im_rois[:, :, 3]).data
                d = torch.max(im_rois[:, :, 4]).data

                # Periodic console/log output: overall loss, learning rate,
                # fg/bg RoI counts, per-component losses, and data-loading time.
                logger.log("[session %d][epoch %2d][iter %4d/%4d] loss: %.4f, lr: %.2e" \
                                 % (args.session, epoch, step, iters_per_epoch, loss_temp, lr))
                logger.log("\t\t\tfg/bg=(%d/%d), time cost: %f" %
                           (fg_cnt, bg_cnt, end - start))
                #print("\t\t\tmidn : %.4f, oicr : %.4f" % (loss_midn, loss_oicr))
                logger.log("\t\t\tmidn : %.4f, oicr : %.4f" %
                           (loss_midn_temp, loss_oicr_temp))
                logger.log("\t\t\tdata loading : %.4f" % (loading_time))
                # Reset the data-loading timer for the next logging interval.
                loading_time = 0
                print(c, d, im_data.shape)
                logger.log("Logging to {}".format(log_dir))
                # end batch

                # logging
                # Record scalar metrics for tabular/experiment tracking output.
                logger.record_tabular('loss', loss_temp)
                logger.record_tabular('midn_loss', loss_midn_temp)
                logger.record_tabular('oicr_loss', loss_oicr_temp)
Example 2
    # NOTE(review): excerpt from inside an evaluation function — the enclosing
    # `def`, and the construction of `all_boxes`, `imdb`, `thresh`, etc.,
    # are outside this view.
    # Filter out detections whose score (last column) falls at or below the
    # per-class threshold `thresh[j]`, in place, for every class and image.
    for j in range(imdb.num_classes):
        for i in range(num_images):
            inds = np.where(all_boxes[j][i][:, -1] > thresh[j])[0]
            all_boxes[j][i] = all_boxes[j][i][inds, :]

    # Save images and ground truth to a MATLAB .mat file named after the dataset.
    model_save_gt = {'images': images_real, 'gt': gt}
    sio.savemat('{}_gt.mat'.format(imdb.name), model_save_gt)

    # Persist thresholded detections (used for mAP evaluation below).
    det_file = os.path.join(output_dir_map, 'detections.pkl')
    with open(det_file, 'wb') as f:
        pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)

    # Persist per-image top boxes (used for CorLoc evaluation below).
    det_file = os.path.join(output_dir_corloc, 'discovery.pkl')
    with open(det_file, 'wb') as f:
        pickle.dump(all_boxes_corloc, f, pickle.HIGHEST_PROTOCOL)

    # due to memory issue
#    det_file_all = os.path.join(output_dir, 'detections_all.pkl')
#    results_all = {'scores_all' : scores_all, 'boxes_all' : boxes_all}
#    with open(det_file_all, 'wb') as f:
#        pickle.dump(results_all, f, pickle.HIGHEST_PROTOCOL)

    # Suppress overlapping detections with the configured NMS threshold.
    print('Applying NMS to all detections')
    nms_dets = apply_nms(all_boxes, cfg.TEST.NMS)

    # Dataset-specific mAP evaluation on the NMS'd detections.
    logger.log('Evaluating detections')
    imdb.evaluate_detections(nms_dets, output_dir_map)

    # CorLoc evaluation uses the pre-NMS per-image discovery boxes.
    logger.log('Evaluating CorLoc')
    imdb.evaluate_discovery(all_boxes_corloc, output_dir_corloc)