Example no. 1
def validate_predictions(test_artifact):
    test_model, test_points = [
        test_artifact[k] for k in ['model', 'validation']
    ]

    # Make sure we have a reasonable set of test points
    assert len(test_points) > 4
    assert not any([t == test_points[0] for t in test_points[1:]])

    ins = [t['input'] for t in test_points]
    outs = np.array([t['output'] for t in test_points])

    model = create_model(test_model, EXTRA_PARAMS)
    remodel = round_trip(test_model, model)

    # Repeat with the legacy-format conversion of the same network
    converted = legacy_convert(test_model)
    legacy_model = create_model(converted, EXTRA_PARAMS)
    legacy_remodel = round_trip(converted, legacy_model)

    for i, true_pred in enumerate(outs):
        mod_pred = model([ins[i]])
        legacy_pred = legacy_model(ins[i])

        outstr = '\nPred: %s\nExpt: %s' % (str(mod_pred[0]), str(true_pred))
        legstr = '\nLegacy: %s\nExpt: %s' % (str(
            legacy_pred[0]), str(true_pred))

        assert np.allclose(mod_pred[0], true_pred, atol=1e-7), outstr
        assert np.allclose(legacy_pred[0], true_pred, atol=1e-7), legstr

    compare_predictions(model, ins, outs)
    compare_predictions(remodel, ins, outs)
    compare_predictions(legacy_remodel, ins, outs)
Example no. 2
def round_trip(settings, wrapper):
    remove_temp_files()

    short = remove_weights(settings)
    # Make sure we get a short network even when the original network is big
    max_len = min(128000, len(json.dumps(settings)))
    assert len(json.dumps(short)) < max_len

    # start = time.time()
    wrapper.save_weights(TEMP_WEIGHTS)
    # print('save weights: %.2f' % (time.time() - start))
    # start = time.time()
    new_wrapper = create_model(short, EXTRA_PARAMS)
    # print('recreate model: %.2f' % (time.time() - start))
    # start = time.time()
    new_wrapper._model.load_weights(TEMP_WEIGHTS)
    # print('load weights: %.2f' % (time.time() - start))
    # print(os.path.getsize(TEMP_WEIGHTS))

    # start = time.time()
    new_wrapper.save_bundle(TEMP_BUNDLE)
    # print('save bundle: %.2f' % (time.time() - start))
    # start = time.time()
    bundle_wrapper = create_model(TEMP_BUNDLE)
    # print('load bundle: %.2f' % (time.time() - start))
    # print(os.path.getsize(TEMP_BUNDLE))

    # start = time.time()
    new_wrapper.save_tfjs(TEMP_TFJS)
    # print('save tfjs: %.2f' % (time.time() - start))

    remove_temp_files()

    return bundle_wrapper
Example no. 3
def validate_predictions(test_artifact):
    test_model, test_points = [
        test_artifact[k] for k in ["model", "validation"]
    ]

    # Make sure we have a reasonable set of test points
    assert len(test_points) > 4
    assert not any([t == test_points[0] for t in test_points[1:]])

    ins = [t["input"] for t in test_points]
    outs = np.array([t["output"] for t in test_points])

    model = create_model(test_model, EXTRA_PARAMS)
    remodel = round_trip(test_model, model)

    converted = legacy_convert(test_model)
    legacy_model = create_model(converted, EXTRA_PARAMS)
    legacy_remodel = round_trip(converted, legacy_model)

    for i, true_pred in enumerate(outs):
        mod_pred = model([ins[i]])
        remod_pred = remodel([ins[i]])
        legacy_pred = legacy_model(ins[i])
        legacy_remod_pred = legacy_remodel(ins[i])

        outstr = "\nPred: %s\nExpt: %s" % (str(mod_pred[0]), str(true_pred))
        legstr = "\nLegacy: %s\nExpt: %s" % (
            str(legacy_pred[0]),
            str(true_pred),
        )

        assert np.allclose(mod_pred[0], true_pred, atol=1e-7), outstr
        assert np.allclose(legacy_pred[0], true_pred, atol=1e-7), legstr

    # Make sure the models still accept a point whose first value is missing
    none_point = list(ins[0])
    none_point[0] = None

    model(none_point)
    remodel(none_point)

    # start = time.time()
    compare_predictions(model, ins, outs)
    # print('model preds: %.2f' % (time.time() - start))
    # start = time.time()
    compare_predictions(remodel, ins, outs)
    # print('remodel preds: %.2f' % (time.time() - start))
    # start = time.time()
    compare_predictions(legacy_remodel, ins, outs)
Example no. 4
def classify(network_name, accuracy_threshold):
    network = get_pretrained_network(network_name)
    nlayers = len(network["image_network"]["layers"])
    noutputs = network["image_network"]["metadata"]["outputs"]
    preprocessors = network["preprocess"]

    pixel_model = create_image_model(network_name, None)

    assert len(get_image_layers(pixel_model)) == nlayers

    for image, cidx in CLASSIFIER_TEST_IMAGES:
        image_path = os.path.join(TEST_IMAGE_DATA, image)
        point = load_points(preprocessors, [[image_path]])
        pred = pixel_model.predict(point)

        check_image_prediction(pred, cidx, accuracy_threshold, 0.02)

    # Compare the feature extractor derived from the pixel model with the
    # exported extractor bundle
    ex_mod = image_feature_extractor(pixel_model)
    bundle_mod = create_model(get_extractor_bundle(network_name))

    read = reader_for_network(network_name, None)
    img_arrays = np.array(
        [read(im[0]).numpy() for im in CLASSIFIER_TEST_IMAGES])

    bundle_outputs = bundle_mod(img_arrays)
    ex_outputs = ex_mod(img_arrays)

    assert ex_outputs.shape == (2, noutputs)
    assert bundle_outputs.shape == (2, noutputs)

    abs_out = np.abs(ex_outputs - bundle_outputs)
    assert np.mean(abs_out) < 1e-5, abs_out
Example no. 5
def round_trip(settings, wrapper):
    assert not os.path.exists(TEMP_WEIGHTS)

    short = remove_weights(settings)
    # Make sure we get a short network even when the original network is big
    max_len = min(128000, len(json.dumps(settings)))
    assert len(json.dumps(short)) < max_len

    wrapper._model.save_weights(TEMP_WEIGHTS)
    new_wrapper = create_model(short, EXTRA_PARAMS)
    new_wrapper._model.load_weights(TEMP_WEIGHTS)

    os.remove(TEMP_WEIGHTS)
    assert not os.path.exists(TEMP_WEIGHTS)

    return new_wrapper
Example no. 6
def test_text():
    with gzip.open(TEXT_MODEL_PATH, "rb") as fin:
        network = json.load(fin)

    text_model = create_model(network)
    assert text_model._model is not None
Example no. 7
def make_mobilenet(settings):
    with gzip.open(MOBILENET_PATH, "rb") as fin:
        network = json.load(fin)

    return create_model(network, settings)