Example no. 1
# Required imports (module paths as used in the FINN toolflow this snippet comes from;
# they may differ between FINN versions). execute_parent, used below for the
# dataflow-partitioned case, is assumed to be available from the surrounding
# FINN test utilities.
import warnings

import numpy as np
from dataset_loading import cifar, mnist
from finn.core.modelwrapper import ModelWrapper
from finn.core.onnx_exec import execute_onnx


def measure_top1_accuracy(model_chkpt, dataset, parent_chkpt=None):
    if dataset == "cifar10":
        trainx, trainy, testx, testy, valx, valy = cifar.load_cifar_data(
            "/workspace/finn/dataset", download=True, one_hot=False
        )
    elif dataset == "mnist":
        trainx, trainy, testx, testy, valx, valy = mnist.load_mnist_data(
            "/workspace/finn/dataset", download=True, one_hot=False
        )
    else:
        raise Exception("Unrecognized dataset")
    # move from dataset_loader layout to ONNX layout: NHWC -> NCHW
    testx = testx.transpose(0, 3, 1, 2)
    model = ModelWrapper(model_chkpt)
    iname = model.graph.input[0].name
    oname = model.graph.output[0].name
    if parent_chkpt is None:
        ishape = model.get_tensor_shape(iname)
    else:
        parent_model = ModelWrapper(parent_chkpt)
        parent_iname = parent_model.graph.input[0].name
        ishape = parent_model.get_tensor_shape(parent_iname)
    ok = 0
    nok = 0
    # one test image is run per iteration, so each "batch" here is a single sample
    n_batches = testx.shape[0]
    for i in range(n_batches):
        tdata = testx[i].reshape(ishape).astype(np.float32)
        exp = testy[i].item()
        if parent_chkpt is not None:
            y = execute_parent(parent_chkpt, model_chkpt, tdata)
        else:
            y = execute_onnx(model, {iname: tdata}, False)[oname]
        # the model output is expected to be a single class index (e.g. the graph
        # ends in a TopK/ArgMax node), so it can be compared to the label directly
        ret = y.item()
        if ret == exp:
            ok += 1
        else:
            nok += 1
        if i % 10 == 0:
            print("%d : OK %d NOK %d " % (i, ok, nok))
    acc_top1 = ok * 100.0 / (ok + nok)
    warnings.warn("Final OK %d NOK %d top-1 %f" % (ok, nok, acc_top1))
    return acc_top1
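A minimal usage sketch (the checkpoint filename is a placeholder, not taken from the original code): given a FINN ONNX checkpoint on disk, the function runs the whole test set and returns the top-1 accuracy in percent.

# hypothetical invocation; "model_streamlined.onnx" is a placeholder checkpoint path
top1 = measure_top1_accuracy("model_streamlined.onnx", "cifar10")
print("top-1 accuracy: %.2f%%" % top1)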
Example no. 2
# Required imports (dataset_loading provides the MNIST loader; tfc_w1a1_mnist is one
# of the prebuilt accelerators shipped with the finn-examples package -- the exact
# import path may vary with the finn-examples version):
import numpy as np
from dataset_loading import mnist
from finn_examples.models import tfc_w1a1_mnist


def main(args):
    trainx, trainy, testx, testy, valx, valy = mnist.load_mnist_data(
        "./data", download=False, one_hot=False)

    accel = tfc_w1a1_mnist()

    print("Expected input shape and datatype: %s %s" %
          (str(accel.ishape_normal), str(accel.idt)))
    print("Expected output shape and datatype: %s %s" %
          (str(accel.oshape_normal), str(accel.odt)))

    batch_size = 1000
    total = testx.shape[0]
    accel.batch_size = batch_size
    # assumes the test set size is divisible by batch_size
    # (true for the 10000-image MNIST test set with batch_size = 1000)
    n_batches = int(total / batch_size)

    batch_imgs = testx.reshape(n_batches, batch_size, -1)
    batch_labels = testy.reshape(n_batches, batch_size)
    obuf_normal = np.empty_like(accel.obuf_packed_device)
    print("Ready to run validation, test images tensor has shape %s" %
          str(batch_imgs.shape))
    print("Accelerator buffer shapes are %s for input, %s for output" %
          (str(accel.ishape_packed), str(accel.oshape_packed)))

    ok = 0
    nok = 0
    for i in range(n_batches):
        ibuf_normal = batch_imgs[i].reshape(accel.ishape_normal)
        exp = batch_labels[i]
        obuf_normal = accel.execute(ibuf_normal)
        # bincount over the boolean comparison yields [#wrong, #right];
        # minlength=2 guards against an all-wrong batch, where the result
        # would otherwise have length 1 and ret[1] would raise an IndexError
        ret = np.bincount(obuf_normal.flatten() == exp.flatten(), minlength=2)
        nok += ret[0]
        ok += ret[1]
        print("batch %d / %d : total OK %d NOK %d" % (i, n_batches, ok, nok))

    acc = 100.0 * ok / total
    print("Final accuracy: {}%".format(acc))
    return 0
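For reference, the bincount-based counting used in the loop above can be checked in isolation; the prediction and label arrays below are made-up data.

import numpy as np

pred = np.array([3, 1, 4, 1, 5])
labels = np.array([3, 1, 4, 0, 5])
# boolean comparison -> bincount gives [number wrong, number right]
counts = np.bincount(pred == labels, minlength=2)
print(counts)  # [1 4]: one wrong, four right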
Example no. 3
                        default="resizer.bit")
    parser.add_argument("--dataset_root",
                        help="dataset root dir for download/reuse",
                        default="/tmp")
    # parse arguments
    args = parser.parse_args()
    bsize = args.batchsize
    dataset = args.dataset
    bitfile = args.bitfile
    platform = args.platform
    dataset_root = args.dataset_root

    if dataset == "mnist":
        from dataset_loading import mnist

        trainx, trainy, testx, testy, valx, valy = mnist.load_mnist_data(
            dataset_root, download=True, one_hot=False)
    elif dataset == "cifar10":
        from dataset_loading import cifar

        trainx, trainy, testx, testy, valx, valy = cifar.load_cifar_data(
            dataset_root, download=True, one_hot=False)
    else:
        raise Exception("Unrecognized dataset")

    test_imgs = testx
    test_labels = testy

    ok = 0
    nok = 0
    total = test_imgs.shape[0]
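The excerpt ends here, before the validation loop; the ok/nok counters set up above feed a final top-1 figure in the same way as in the previous examples. A self-contained illustration of that last step, with made-up counts:

ok, nok, total = 9871, 129, 10000
acc = 100.0 * ok / total
print("Final accuracy: %f%%" % acc)  # Final accuracy: 98.710000%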