Code Example #1
File: __main__.py, Project: gsunner/seldon-core
def main():
    args, extra = parse_args(sys.argv[1:])
    # Pretrained Alibi explainer
    alibi_model = None
    keras_model = None

    predict_fn = construct_predict_fn(
        predictor_host=args.predictor_host,
        model_name=args.model_name,
        protocol=Protocol(args.protocol),
        tf_data_type=args.tf_data_type,
    )

    if args.storage_uri is not None:
        # we assume here that the model is already available at this local path
        path = args.storage_uri

        if is_persisted_explainer(path):
            alibi_model = get_persisted_explainer(predict_fn=predict_fn,
                                                  dirname=path)

        if is_persisted_keras(path):
            keras_model = get_persisted_keras(path)

    explainer = AlibiExplainer(
        name=args.model_name,
        predict_fn=predict_fn,
        method=ExplainerMethod(args.command),
        config=extra,
        explainer=alibi_model,
        protocol=Protocol(args.protocol),
        keras_model=keras_model,
    )
    explainer.load()
    ExplainerServer(args.http_port).start(explainer)
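For orientation, the entrypoint in Example #1 could be driven in-process roughly as follows. This is only a sketch, not part of the project: the flag spellings are inferred from the attribute names used above, the AnchorTabular subcommand and --threshold come from the parser tests further down this page, and the protocol string and all concrete values are assumptions.

import sys

# Hypothetical argument list; every value is a placeholder.
sys.argv = [
    "alibiexplainer",                     # program name, ignored by parse_args
    "--model_name", "income",
    "--predictor_host", "income-predictor.default:8000",
    "--protocol", "seldon.http",          # assumed Protocol enum value
    "--http_port", "8080",
    "--storage_uri", "/mnt/models",       # optional: local dir holding a persisted explainer
    "AnchorTabular",                      # explainer method subcommand
    "--threshold", "0.95",
]

# main() then builds the predict_fn, loads any persisted explainer found under
# /mnt/models, wraps everything in AlibiExplainer, and serves on the HTTP port.
main()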
Code Example #2
def main():
    args, extra = parse_args(sys.argv[1:])
    # Pretrained Alibi explainer
    alibi_model = None
    keras_model = None
    if args.storage_uri is not None:
        # Download the artefacts referenced by the storage URI to a local directory.
        path = kfserving.Storage.download(args.storage_uri)
        explainer_path = os.path.join(path, EXPLAINER_FILENAME)
        if os.path.exists(explainer_path):
            with open(explainer_path, "rb") as f:
                logging.info("Loading Alibi model")
                alibi_model = dill.load(f)
        else:
            keras_path = os.path.join(path, KERAS_MODEL)
            if os.path.exists(keras_path):
                logging.info("Loading Keras model")
                keras_model = keras.models.load_model(keras_path)

    explainer = AlibiExplainer(args.model_name, args.predictor_host,
                               ExplainerMethod(args.command), extra,
                               alibi_model, Protocol(args.protocol),
                               args.tf_data_type, keras_model)
    explainer.load()
    ExplainerServer(args.http_port).start(explainer)
Code Example #3
def test_anchor_images_parser():
    args = [
        "--predictor_host",
        PREDICTOR_HOST,
        "AnchorImages",
        "--p_sample",
        str(P_SAMPLE),
    ]
    parser, _ = parse_args(args)
    assert parser.predictor_host == PREDICTOR_HOST
    assert parser.explainer.p_sample == P_SAMPLE
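The assertion on parser.explainer.p_sample implies that parse_args returns a namespace with a nested explainer attribute for method-specific options. The project's actual parser is not shown on this page; below is a minimal sketch, assuming argparse subcommands plus a custom action that groups explainer options, of how such a namespace could be produced. Only the AnchorImages subcommand is sketched.

import argparse

class GroupedAction(argparse.Action):
    """Store an option under a nested `explainer` namespace instead of the top level."""

    def __call__(self, parser, namespace, values, option_string=None):
        group = getattr(namespace, "explainer", None)
        if group is None:
            group = argparse.Namespace()
            setattr(namespace, "explainer", group)
        setattr(group, self.dest, values)

def parse_args(argv):
    parser = argparse.ArgumentParser(description="Alibi explainer wrapper (sketch)")
    parser.add_argument("--predictor_host", required=True)
    subparsers = parser.add_subparsers(dest="command")

    # One subcommand per explainer method; only AnchorImages is shown here.
    anchor_images = subparsers.add_parser("AnchorImages")
    anchor_images.add_argument("--p_sample", type=float, action=GroupedAction)

    # Unknown options are returned separately, matching the `args, extra`
    # unpacking in the main() examples above.
    return parser.parse_known_args(argv)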
Code Example #4
def test_integrated_gradients_parser():
    args = [
        "--predictor_host", PREDICTOR_HOST, "IntegratedGradients", "--method",
        str(METHOD), "--layer",
        str(LAYER), "--n_steps",
        str(N_STEPS), "--internal_batch_size",
        str(INTERNAL_BATCH_SIZE)
    ]
    parser, _ = parse_args(args)
    assert parser.predictor_host == PREDICTOR_HOST
    assert parser.explainer.method == METHOD
    assert parser.explainer.layer == LAYER
    assert parser.explainer.n_steps == N_STEPS
    assert parser.explainer.internal_batch_size == INTERNAL_BATCH_SIZE
Code Example #5
def test_shared_explainer_args():
    args = [
        "--predictor_host",
        PREDICTOR_HOST,
        "AnchorTabular",
        "--threshold",
        str(THRESHOLD),
        "--delta",
        str(DELTA),
        "--tau",
        str(TAU),
        "--batch_size",
        str(BATCH_SIZE),
        "--coverage_samples",
        str(COVERAGE_SAMPLES),
        "--beam_size",
        str(BEAM_SIZE),
        "--stop_on_first",
        str(STOP_ON_FIRST),
        "--max_anchor_size",
        str(MAX_ANCHOR_SIZE),
        "--max_samples_start",
        str(MAX_SAMPLES_START),
        "--n_covered_ex",
        str(N_COVERED_EX),
        "--binary_cache_size",
        str(BINARY_CACHE_SIZE),
        "--cache_margin",
        str(CACHE_MARGIN),
        "--verbose",
        str(VERBOSE),
        "--verbose_every",
        str(VERBOSE_EVERY),
    ]
    parser, _ = parse_args(args)
    assert parser.explainer.threshold == THRESHOLD
    assert parser.explainer.delta == DELTA
    assert parser.explainer.tau == TAU
    assert parser.explainer.batch_size == BATCH_SIZE
    assert parser.explainer.coverage_samples == COVERAGE_SAMPLES
    assert parser.explainer.beam_size == BEAM_SIZE
    assert parser.explainer.stop_on_first == STOP_ON_FIRST
    assert parser.explainer.max_anchor_size == MAX_ANCHOR_SIZE
    assert parser.explainer.max_samples_start == MAX_SAMPLES_START
    assert parser.explainer.n_covered_ex == N_COVERED_EX
    assert parser.explainer.binary_cache_size == BINARY_CACHE_SIZE
    assert parser.explainer.cache_margin == CACHE_MARGIN
    assert parser.explainer.verbose == VERBOSE
    assert parser.explainer.verbose_every == VERBOSE_EVERY
Code Example #6
File: __main__.py, Project: MATRIX4284/kf-serving-pro
def main():
    args, extra = parse_args(sys.argv[1:])
    # Pretrained Alibi explainer
    alibi_model = None
    if args.storage_uri is not None:
        explainer_path = os.path.join(
            kfserving.Storage.download(args.storage_uri), EXPLAINER_FILENAME)
        with open(explainer_path, "rb") as f:
            logging.info("Loading Alibi model")
            alibi_model = dill.load(f)

    explainer = AlibiExplainer(
        args.model_name,
        args.predictor_host,
        ExplainerMethod(args.command),
        extra,
        alibi_model,
    )
    explainer.load()
    kfserving.KFServer().start(models=[explainer])
Code Example #7
def test_anchor_text_parser():
    args = [
        "--predictor_host",
        PREDICTOR_HOST,
        "AnchorText",
        "--use_unk",
        str(USE_UNK),
        "--use_similarity_proba",
        str(USE_SIMILARITY_PROBA),
        "--sample_proba",
        str(SAMPLE_PROBA),
        "--top_n",
        str(TOP_N),
        "--temperature",
        str(TEMPERATURE),
    ]
    parser, _ = parse_args(args)
    assert parser.predictor_host == PREDICTOR_HOST
    assert parser.explainer.use_unk == USE_UNK
    assert parser.explainer.use_similarity_proba == USE_SIMILARITY_PROBA
    assert parser.explainer.sample_proba == SAMPLE_PROBA
    assert parser.explainer.top_n == TOP_N
    assert parser.explainer.temperature == TEMPERATURE
Code Example #8
def test_basic_args():
    args = ["--predictor_host", PREDICTOR_HOST]
    parser, _ = parse_args(args)
    assert parser.predictor_host == PREDICTOR_HOST