Code example #1
# pytest fixture (decorator not shown in this excerpt); request.param supplies
# (use_kdtree, c_init, c_steps) through indirect parametrization
def tf_keras_adult_explainer(request, tf_keras_adult):
    X_train, model, cat_vars_ohe = tf_keras_adult

    shape = (1, 57)
    cf_explainer = CounterFactualProto(model,
                                       shape,
                                       beta=.01,
                                       cat_vars=cat_vars_ohe,
                                       ohe=True,
                                       use_kdtree=request.param[0],
                                       max_iterations=1000,
                                       c_init=request.param[1],
                                       c_steps=request.param[2],
                                       feature_range=(-1 * np.ones(
                                           (1, 12)), np.ones((1, 12))))
    yield X_train, model, cf_explainer
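Because the fixture above reads request.param, it only works when a test parametrizes it indirectly. A minimal, hypothetical sketch of such a test (the parameter tuples and the assertion are illustrative assumptions, not taken from the project):

import pytest

# indirect parametrization feeds (use_kdtree, c_init, c_steps) into the fixture
@pytest.mark.parametrize('tf_keras_adult_explainer',
                         [(True, 1., 3), (False, 1., 3)],
                         indirect=True)
def test_adult_counterfactual(tf_keras_adult_explainer):
    X_train, model, cf = tf_keras_adult_explainer
    cf.fit(X_train, d_type='abdm')            # compute categorical distances / k-d trees
    explanation = cf.explain(X_train[0:1])    # counterfactual for a single instance
    if explanation.cf is not None:            # None if no counterfactual was found
        assert explanation.cf['class'] != explanation.orig_class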
Code example #2
File: test_cfproto.py Project: LeonChou5311/alibi
# pytest fixture (decorator not shown in this excerpt), following the same pattern as example #1
def tf_keras_adult_explainer(request, models, adult_data):
    shape = (1, 57)
    cat_vars_ohe = adult_data['metadata']['cat_vars_ohe']
    cf_explainer = CounterFactualProto(models[0],
                                       shape,
                                       beta=.01,
                                       cat_vars=cat_vars_ohe,
                                       ohe=True,
                                       use_kdtree=request.param[0],
                                       max_iterations=1000,
                                       c_init=request.param[1],
                                       c_steps=request.param[2],
                                       feature_range=(-1 * np.ones(
                                           (1, 12)), np.ones((1, 12))))
    yield models[0], cf_explainer
    # teardown: release graph state from both standalone Keras and tf.keras
    keras.backend.clear_session()
    tf.keras.backend.clear_session()
Code example #3
File: test_cfproto.py Project: strategist922/alibi
# pytest fixture (decorator not shown in this excerpt) built around a scikit-learn
# logistic regression on the Iris data
def iris_explainer(logistic_iris):
    X, y, clf = logistic_iris

    # define prediction function
    predict_fn = lambda x: clf.predict_proba(x)

    # initialize explainer; this snippet uses the TF1-era alibi API, which took an
    # explicit tf.Session as the first constructor argument
    shape = (1, 4)
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    cf = CounterFactualProto(sess,
                             predict_fn, (1, 4),
                             use_kdtree=True,
                             max_iterations=500,
                             theta=10.,
                             feature_range=(X.min(axis=0).reshape(shape),
                                            X.max(axis=0).reshape(shape)),
                             c_init=1.,
                             c_steps=3)
    yield X, y, clf, predict_fn, cf
    sess.close()
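Example #3 targets TensorFlow 1.x and an early alibi release whose constructor still took a tf.Session before the predict function. On current alibi versions the session argument is gone and the predictor (a Keras model or a black-box function) is passed first; a rough equivalent, assuming the same logistic_iris objects, would be:

import tensorflow as tf
from alibi.explainers import CounterFactualProto

tf.compat.v1.disable_eager_execution()  # CounterFactualProto builds a TF1-style graph

predict_fn = lambda x: clf.predict_proba(x)  # clf: the fitted scikit-learn classifier from the fixture
shape = (1, 4)
cf = CounterFactualProto(predict_fn,         # no session argument on recent versions
                         shape,
                         use_kdtree=True,
                         max_iterations=500,
                         theta=10.,
                         feature_range=(X.min(axis=0).reshape(shape),
                                        X.max(axis=0).reshape(shape)),
                         c_init=1.,
                         c_steps=3)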
Code example #4
# pytest fixture (decorator not shown in this excerpt); request.param selects k-d tree
# vs. (auto)encoder prototypes and supplies c_init / c_steps
def tf_keras_iris_explainer(request, tf_keras_iris):
    X_train, model, ae, enc = tf_keras_iris

    if request.param[0]:  # use k-d trees
        ae = None
        enc = None

    shape = (1, 4)
    cf_explainer = CounterFactualProto(
        model,
        shape,
        gamma=100,
        theta=100,
        ae_model=ae,
        enc_model=enc,
        use_kdtree=request.param[0],
        max_iterations=1000,
        c_init=request.param[1],
        c_steps=request.param[2],
        feature_range=(X_train.min(axis=0).reshape(shape),
                       X_train.max(axis=0).reshape(shape)))
    yield X_train, model, cf_explainer
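The fixture in example #4 toggles between two ways of steering the counterfactual towards a class prototype: an encoder (enc_model, optionally with an autoencoder ae_model whose reconstruction loss is weighted by gamma) or k-d trees built from the training data (use_kdtree=True); in both cases theta weights the prototype loss. A condensed sketch of the two configurations (values are illustrative):

# prototypes from an encoder, plus an autoencoder reconstruction penalty
cf_enc = CounterFactualProto(model, (1, 4),
                             enc_model=enc, ae_model=ae,
                             gamma=100,      # weight of the autoencoder loss term
                             theta=100)      # weight of the prototype loss term

# prototypes from k-d trees over the training set, no (auto)encoder required
cf_kdt = CounterFactualProto(model, (1, 4),
                             use_kdtree=True,
                             theta=100)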
Code example #5
File: test_cfproto.py Project: LeonChou5311/alibi
def tf_keras_iris_explainer(request, models, iris_data):
    X_train = iris_data['X_train']
    model, ae, enc = models
    if request.param[0]:  # use k-d trees
        ae = None
        enc = None

    shape = (1, 4)
    cf_explainer = CounterFactualProto(
        model,
        shape,
        gamma=100,
        theta=100,
        ae_model=ae,
        enc_model=enc,
        use_kdtree=request.param[0],
        max_iterations=1000,
        c_init=request.param[1],
        c_steps=request.param[2],
        feature_range=(X_train.min(axis=0).reshape(shape),
                       X_train.max(axis=0).reshape(shape)))
    yield model, cf_explainer
    keras.backend.clear_session()
    tf.keras.backend.clear_session()
Code example #6
File: slides.ipynb.py Project: Chau999/OSG_Digital
# Evaluation
score = nn.evaluate(x_test, y_test, verbose=0)
print('Test accuracy: ', score[1])

# Generate counterfactual
X = x_test[1].reshape((1, ) + x_test[1].shape)
shape = X.shape

tf.compat.v1.disable_eager_execution()
# initialize explainer, fit and generate counterfactual
cf = CounterFactualProto(nn,
                         shape,
                         use_kdtree=True,
                         theta=10.,
                         max_iterations=1000,
                         feature_range=(x_train.min(axis=0),
                                        x_train.max(axis=0)),
                         c_init=1.,
                         c_steps=10)

cf.fit(x_train)
explanation = cf.explain(X)

print(f'Original prediction: {explanation.orig_class}')
print(f"Counterfactual prediction: {explanation.cf['class']}")

# Examine the full counterfactual record (displayed as the notebook cell's output)
explanation['cf']

# Undo the standardisation applied earlier in the notebook (mu, sigma are defined there)
orig = X * sigma + mu
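The explanation returned above can be inspected further; a sketch assuming alibi's usual CounterFactualProto output fields (cf['X'], cf['class'], cf['proba'], orig_class, orig_proba):

if explanation.cf is not None:
    cf_instance = explanation.cf['X']                     # the counterfactual instance
    print('Counterfactual class:', explanation.cf['class'])
    print('Class probabilities: ', explanation.cf['proba'])
    print('Original class/proba:', explanation.orig_class, explanation.orig_proba)
    # undo the standardisation, as done for the original instance above
    print('Counterfactual (original scale):', cf_instance * sigma + mu)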
Code example #7
File: mlobject.py Project: viadee/eric
    def cf_proto_connector(self, target, query_instance):
        # build a CounterFactualProto explanation for a single query instance and
        # map the resulting counterfactual back onto the original feature space

        predict_fn = lambda x: self.model['classifier'].predict_proba(x)

        preprocessed_instance = self.model['preprocessor'].transform(
            pd.DataFrame([query_instance]))

        categories = self.getCategoricalFeatures()
        continuous = self.getContinuousFeatures()
        print(categories)
        print(continuous)

        new_instance = {}

        # cat_vars maps the column index where each one-hot encoded categorical
        # block starts to its number of categories; continuous features come first
        cat_vars = {}
        start = len(continuous)  # number of continuous features
        for f in categories:
            numbers_features = len(np.unique(self.X_train[f]))
            cat_vars[start] = numbers_features
            start = start + numbers_features

        transformed_training = self.model['preprocessor'].transform(
            self.X_train)

        cf = CounterFactualProto(
            predict_fn,
            shape=np.shape(preprocessed_instance),
            beta=0.1,
            cat_vars=cat_vars,
            ohe=True,
            max_iterations=2000,
            feature_range=(np.zeros((1, len(self.featureNames))),
                           np.ones((1, len(self.featureNames)))),
            #feature_range= (np.array([[-1, -1, -1, -1, -1, -1]]), np.array([[1, 1, 1, 1, 1, 1]])),
            c_init=1.,
            c_steps=5,
            eps=(.1, .1)  # perturbation size for numerical gradients
        )

        cf.fit(transformed_training, d_type='abdm', disc_perc=[25, 50, 75])

        explanation = cf.explain(X=preprocessed_instance,
                                 target_class=[target])

        if explanation['cf'] is not None:

            print("FINISHED!!!!!!")
            print(explanation['cf']['X'])
            # the one-hot encoded categorical block starts after the continuous columns
            print(explanation['cf']['X'][0][len(continuous):])

            one_hot_training = self.X_train[categories].to_numpy()
            one = OneHotEncoder(handle_unknown='ignore')
            one.fit(one_hot_training)
            inverse_one_hot = one.inverse_transform(
                [explanation['cf']['X'][0][len(continuous):]])

            scaler_training = self.X_train[continuous].to_numpy()
            scaler = MinMaxScaler()
            scaler.fit(scaler_training)
            inverse_scale = scaler.inverse_transform(
                [explanation['cf']['X'][0][0:len(continuous)]])

            for i in range(len(categories)):
                new_instance[categories[i]] = inverse_one_hot[0][i]

            for i in range(len(continuous)):
                new_instance[continuous[i]] = round(inverse_scale[0][i], 2)

            return new_instance
        else:
            return None
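The cat_vars dictionary built in example #7 follows alibi's one-hot convention: with ohe=True, each key is the column index where a one-hot encoded categorical block starts and each value is the number of categories in that block, with the continuous columns placed first. A toy illustration (feature names and sizes are made up):

# hypothetical layout: 2 continuous columns followed by two one-hot blocks
continuous = ['age', 'hours_per_week']   # columns 0-1
cat_vars_ohe = {
    2: 3,   # first categorical feature: columns 2-4 (3 categories)
    5: 4,   # second categorical feature: columns 5-8 (4 categories)
}
# passed as: CounterFactualProto(..., cat_vars=cat_vars_ohe, ohe=True)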