Пример #1
0
    def compute_average_shape(self, dataset_path):
        """Compute the mean landmark shape over a .pts dataset and save it.

        Each .pts file is paired (by list index) with a .jpg image; landmarks
        are normalized by image (width, height), aligned on their first point,
        averaged, and the mean anchor is added back. The x/y columns are
        swapped before saving to self.template_filename.

        Args:
            dataset_path: folder containing matching .pts and .jpg files.

        Raises:
            ValueError: if dataset_path contains no .pts file.
        """
        filelist = fp.dir(dataset_path, '.pts')
        im_filelist = fp.dir(dataset_path, '.jpg')
        if len(filelist) == 0:
            # Bug fix: the exception was constructed but never raised.
            raise ValueError('dataset_path contains no pts file!')
        # Number of landmark points, read from the header of the first file.
        with open(filelist[0]) as f:
            lines = f.readlines()
            n = int(lines[1].split(':')[1])

        P = np.zeros([n, 2], np.float32)
        p0 = np.zeros([1, 2], np.float32)
        for k, filename in enumerate(filelist):
            im = cv2.imread(im_filelist[k])
            pts = self.read_pts(filename)
            # Normalize coordinates to [0, 1] by image (width, height).
            row = np.array([im.shape[1], im.shape[0]])
            pts = pts / row
            p0 += pts[0]   # accumulate the anchor (first) point
            pts -= pts[0]  # align every shape on its first point
            P += pts
        p0 /= len(filelist)
        P /= len(filelist)
        P += p0  # restore the mean anchor position
        # Swap x and y columns. The RHS tuple is evaluated first; the .copy()
        # protects column 1's values from being clobbered by the view.
        P[:, 1], P[:, 0] = P[:, 0], P[:, 1].copy()
        self.save_pts(self.template_filename, P)
        print(self.template_filename + ' saved!')
Пример #2
0
def ComputeEvalForMethod(fd_rst, fd_gt, NumOfThresholds):
    """Evaluate segmentation results against ground truth over a threshold sweep.

    For every (result, ground-truth) image pair, computes
    (precision, recall, IoU, FPR, F1) at NumOfThresholds thresholds in [0, 1]
    and saves the per-sample matrix plus its per-threshold mean and variance
    into fd_rst.

    Args:
        fd_rst: folder with predicted grayscale maps (.jpg/.png/.tiff).
        fd_gt: folder with ground-truth masks, paired with fd_rst by index.
        NumOfThresholds: number of thresholds sampled uniformly in [0, 1].
    """
    fnames_rst = fp.dir(fd_rst, ['.jpg', '.png', '.tiff'])
    fnames_gt = fp.dir(fd_gt, ['.jpg', '.png', '.tiff'])

    assert len(fnames_rst) == len(fnames_gt), 'len(fnames_rst) == len(fnames_gt)'

    NumOfSamples = len(fnames_rst)

    # EvalMatrix[k, m, j]: sample k, metric m (P, R, IoU, FPR, F1), threshold j.
    EvalMatrix = np.zeros([NumOfSamples, 5, NumOfThresholds], np.float32)

    thresholds = np.linspace(0, 1, NumOfThresholds)

    for k in tqdm.tqdm(range(NumOfSamples)):
        rst = cv2.imread(fnames_rst[k], cv2.IMREAD_GRAYSCALE)
        gt = cv2.imread(fnames_gt[k], cv2.IMREAD_GRAYSCALE)

        # Map 8-bit images to [0, 1] so the thresholds are comparable.
        rst = rst.astype(np.float32) / 255.0
        gt = gt.astype(np.float32) / 255.0

        for j, thres in enumerate(thresholds):
            # The ground truth is binarized once at 0.01; only rst sweeps.
            precision, recall, iou, fpr, f1 = SegEval(rst >= thres, gt >= 0.01)
            EvalMatrix[k, :, j] = precision, recall, iou, fpr, f1

    Mu = np.mean(EvalMatrix, axis=0)
    # Bug fix: this previously recomputed np.mean instead of the variance.
    Var = np.var(EvalMatrix, axis=0)

    np.save(os.path.join(fd_rst, 'EvalMatrix.npy'), EvalMatrix)
    # Bug fix: np.save appends '.npy' to names not ending in it, so the old
    # 'Mu.mu' / 'Var.var' actually produced 'Mu.mu.npy' / 'Var.var.npy'.
    np.save(os.path.join(fd_rst, 'Mu.npy'), Mu)
    np.save(os.path.join(fd_rst, 'Var.npy'), Var)
Пример #3
0
 def preload(self, fd_npy):
     """Memory-map every .npy file in *fd_npy*.

     Args:
         fd_npy: folder containing .npy arrays; they are mmap-opened,
             not loaded into RAM.

     Returns:
         Tuple (N, data_npy, sample_shape): total sample count summed over
         axis 0 of each array, the list of memory-mapped arrays, and the
         per-sample shape taken from the first array.
     """
     fnames_npy = fp.dir(fd_npy, '.npy')
     # mmap_mode='r' keeps the arrays on disk instead of loading them.
     data_npy = [np.load(fname, 'r') for fname in fnames_npy]
     # Removed: an unused psutil.Process handle and stale commented-out
     # debug logging.
     N = sum(d.shape[0] for d in data_npy)
     return N, data_npy, data_npy[0].shape[1:]
Пример #4
0
 def _get_filenames_helper(fd, title2folder, exts):
     """Collect per-title file paths under *fd*, truncated to a common length.

     For each (title, subfolder) pair, lists the files matching *exts* in
     fd/subfolder (case-sensitively) and records their './subfolder/stem'
     paths. Every list is then cut to the length of the shortest one so the
     titles stay index-aligned.
     """
     if not title2folder:
         return {}
     filenames = {}
     for title, path in title2folder.items():
         fullpath = os.path.join(fd, path)
         stems = []
         for entry in fp.dir(fullpath, exts, case_sensitive=True):
             stems.append(os.path.join('./', path, fp.stem(entry)))
         filenames[title] = stems
     minlen = min(len(v) for v in filenames.values())
     return {title: v[:minlen] for title, v in filenames.items()}
Пример #5
0
def copy_specified_files(fd, fd_out, ext, indices, suffix):
    """Copy selected files from every subfolder of *fd* into *fd_out*.

    For each immediate subfolder of fd, the files with extension *ext* at
    positions *indices* (in fp.dir order) are copied into a mirror folder
    fd_out/<subfolder name><suffix>.

    Args:
        fd: input root folder whose immediate subfolders are scanned.
        fd_out: output root folder (created if missing).
        ext: file extension filter passed to fp.dir.
        indices: positional indices of the files to copy from each subfolder.
        suffix: string appended to each output subfolder name.
    """
    import shutil  # local import: robust copying without a shell

    fp.mkdir(fd_out)
    for sub_fd in fp.subdirs(fd):
        fnames = fp.dir(sub_fd, ext)
        selected = [fnames[idx] for idx in indices]
        _, name, ext_ = fp.fileparts(sub_fd)
        sub_fd_out = os.path.join(fd_out, name + ext_ + suffix)
        fp.mkdir(sub_fd_out)
        # Bug fix: the old string-concatenated 'cp' shell command broke on
        # paths containing spaces and was Unix-only; shutil.copy is safe
        # and portable, and raises on failure instead of failing silently.
        for fname in selected:
            print('copy {} -> {}'.format(fname, sub_fd_out))
            shutil.copy(fname, sub_fd_out)
Пример #6
0
    def FindBestModel(self):
        """Evaluate every saved checkpoint and record the best model.

        Backs up the TF 'checkpoint' index file, then rewrites it to point at
        each saved model (newest first, skipping temp models), runs
        Evaluate(), and feeds the losses to record_best_model. The original
        index file is restored afterwards whether or not evaluation failed.
        """
        import shutil  # local import: file copy that raises real errors

        fname_ckpt_orig = os.path.join(self.FLAGS.log_path, 'checkpoint')
        fname_ckpt_backup = os.path.join(self.FLAGS.log_path,
                                         'checkpoint-backup')
        try:
            # Bug fix: os.system('cp ...') never raises on failure, so the
            # old try/except could not actually detect a failed backup.
            shutil.copyfile(fname_ckpt_orig, fname_ckpt_backup)
        except OSError:
            logger.warning('The original checkpoint file not backup-ed!')
            return

        try:
            fname_ckpt_models = fp.dir(self.FLAGS.log_path,
                                       '.meta',
                                       case_sensitive=True)
            fname_ckpt_models = fname_ckpt_models[::-1]
            for fname in fname_ckpt_models:
                if 'temp_model' in fname:
                    continue
                # Bug fix: rstrip('.meta') strips any trailing run of the
                # characters '.', 'm', 'e', 't', 'a' — it can eat part of
                # the model name (e.g. 'net-beta.meta' -> 'net-b').
                # Remove the exact suffix instead.
                if fname.endswith('.meta'):
                    fname = fname[:-len('.meta')]
                fname = fname.replace('\\', '/')
                print(fname)
                # Point the checkpoint index at this single model.
                with open(fname_ckpt_orig, 'w') as f:
                    f.write('model_checkpoint_path: "{}"\n'.format(fname))
                    f.write('all_model_checkpoint_paths: "{}"\n'.format(fname))
                eval_loss_vals = self.Evaluate()
                # Plain loop: a list comprehension run only for side effects
                # built a throwaway list.
                for line in pyutils.dict_to_string(eval_loss_vals, 3):
                    logger.info(line)
                self.record_best_model(self.best_model_table, eval_loss_vals,
                                       fname)
                print(fname + ' END')
        except Exception:
            traceback.print_exc()
            logger.error('ERROR')
        finally:
            # Restore the original index on both success and failure
            # (replaces the duplicated except/else restore).
            shutil.copyfile(fname_ckpt_backup, fname_ckpt_orig)
Пример #7
0
 def preload(self, fd_npy):
     """Memory-map all .npy arrays found in *fd_npy*.

     Returns:
         Tuple of (total sample count across axis 0, list of memory-mapped
         arrays, per-sample shape of the first array).
     """
     arrays = []
     for path in fp.dir(fd_npy, '.npy'):
         # mmap_mode='r' avoids reading the whole file into memory.
         arrays.append(np.load(path, 'r'))
     total = sum(a.shape[0] for a in arrays)
     return total, arrays, arrays[0].shape[1:]