print(
                    "Counting histogram..., config=%s, distance=%s, depth=%s" %
                    (sample_cache, d_type, depth))

            # NOTE(review): fragment of a cache-building method — the enclosing
            # `def` (and the cache-miss branch guarding this code) is above the
            # visible window, so parameter semantics are inferred, not shown.
            samples = []
            data = db.get_data()
            # Compute a Gabor descriptor for every record in the database.
            for d in data.itertuples():
                d_img, d_cls = getattr(d, "img"), getattr(d, "cls")
                d_hist = self.gabor_histogram(d_img,
                                              type=h_type,
                                              n_slice=n_slice)
                samples.append({'img': d_img, 'cls': d_cls, 'hist': d_hist})
            # Persist the computed features so later runs can load the cache
            # instead of recomputing (cPickle suggests a Python 2 codebase).
            cPickle.dump(
                samples, open(os.path.join(cache_dir, sample_cache), "wb",
                              True))

        return samples


if __name__ == "__main__":
    database = Database()

    # Evaluate retrieval quality per class, then report the mean of means.
    per_class_aps = evaluate_class(database, f_class=Gabor, d_type=d_type,
                                   depth=depth)
    class_means = []
    for label, ap_values in per_class_aps.items():
        class_mean = np.mean(ap_values)
        print("Class {}, MAP {}".format(label, class_mean))
        class_means.append(class_mean)
    print("MMAP", np.mean(class_means))
# ===== 示例 (Example) #2 =====
# 0
            # NOTE(review): fragment of a cache-building method — the enclosing
            # `def` is outside this view; `samples`, `h_type`, `n_bin`,
            # `n_slice`, `cache_dir` and `sample_cache` are bound above it.
            data = db.get_data()
            # Compute a color histogram descriptor for every database record.
            for d in data.itertuples():
                d_img, d_cls = getattr(d, "img"), getattr(d, "cls")
                d_hist = self.histogram(d_img, type=h_type, n_bin=n_bin, n_slice=n_slice)
                samples.append({
                    'img': d_img,
                    'cls': d_cls,
                    'hist': d_hist
                })
            # Cache the features on disk so later runs can skip recomputation.
            cPickle.dump(samples, open(os.path.join(cache_dir, sample_cache), "wb", True))

        return samples


if __name__ == "__main__":
    db = Database()
    data = db.get_data()
    color = Color()

    # Test normalization: a normalized histogram must sum to 1.
    # BUG FIX: the original asserted `hist.sum() - 1 < 1e-9`, which passes for
    # ANY sum <= 1 + 1e-9 (e.g. an all-zero histogram); compare the absolute
    # deviation instead.
    # FIX: `.ix` was removed from pandas (>=1.0); `.iloc` gives the same
    # positional lookup for the default integer index used here.
    hist = color.histogram(data.iloc[0, 0], type='global')
    assert abs(hist.sum() - 1) < 1e-9, "normalize false"

    # Test histogram bins.
    def sigmoid(z):
        """Logistic function 1 / (1 + exp(-z)); maps reals into (0, 1)."""
        return 1.0 / (1.0 + np.exp(-1. * z))

    np.random.seed(0)
# ===== 示例 (Example) #3 =====
# 0
                        inputs = torch.autograd.Variable(
                            torch.from_numpy(img).float())
                    # NOTE(review): torch.autograd.Variable is deprecated in
                    # modern PyTorch — plain tensors work directly.
                    d_hist = vgg_model(inputs)[pick_layer]
                    # Collapse the leading (batch/channel) axis, then
                    # L1-normalize so the descriptor sums to 1.
                    d_hist = np.sum(d_hist.data.cpu().numpy(), axis=0)
                    d_hist /= np.sum(d_hist)  # normalize
                    samples.append({
                        'img': d_img,
                        'cls': d_cls,
                        'hist': d_hist
                    })
                # NOTE(review): broad silent swallow — any failure (unreadable
                # image, CUDA error, shape mismatch) drops the record without a
                # trace; consider logging the exception.
                except BaseException:
                    pass
            # Cache the extracted features on disk for reuse by later runs.
            cPickle.dump(
                samples, open(os.path.join(cache_dir, sample_cache), "wb",
                              True))

        return samples


if __name__ == "__main__":
    # Run per-class retrieval evaluation over the whole database and
    # report each class's mean average precision plus the overall mean.
    database = Database()
    per_class_aps = evaluate_class(
        database, f_class=VGGNetFeat, d_type=d_type, depth=depth)

    class_means = []
    for label, ap_values in per_class_aps.items():
        class_mean = np.mean(ap_values)
        print("Class {}, MAP {}".format(label, class_mean))
        class_means.append(class_mean)
    print("MMAP", np.mean(class_means))