import time

import numpy as np
from deepexplain.tensorflow import DeepExplain
from keras import backend as K
from keras.models import Model
from sklearn.datasets import make_classification
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import RandomizedSearchCV, train_test_split

# Project-local names used below are assumed importable from this repo:
# BnnBinaryClassifier, rate, RATE_ray, RATE2, train_mimic, gbm_param_grid,
# add_importance_scores and add_timing.


def test_ray():
    """Test that the ray-parallelised RATE result matches the sequential one."""

    # Sweep over (n_samples, n_features) configurations
    for n, p in [(100, 10), (1000, 30), (1000, 100)]:

        X, y = make_classification(n_samples=n,
                                   n_features=p,
                                   n_informative=int(0.1 * p),
                                   n_redundant=0,
                                   n_repeated=0,
                                   n_classes=2,
                                   n_clusters_per_class=1,
                                   flip_y=0.1,
                                   shift=0.0,
                                   scale=1.0,
                                   shuffle=False,
                                   random_state=123)
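        # Reshape labels to a column vector of shape (n, 1) for the BNN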
        y = y[:, np.newaxis]

        X_train, X_test, y_train, y_test = train_test_split(X,
                                                            y,
                                                            test_size=0.2,
                                                            random_state=123)

        bnn = BnnBinaryClassifier(verbose=0).fit(X_train, y_train)
        # Posterior mean and covariance of the test-set logits, which
        # parametrise the RATE computation
        M_F, V_F = bnn.logit_posterior(X_test)

        # The sequential result must agree exactly with the ray version,
        # both single-worker and with two parallel workers
        seq_result = rate(X_test, M_F, V_F)
        ray_seq_result = RATE_ray(X_test, M_F, V_F, n_jobs=1)
        ray_par_result = RATE_ray(X_test, M_F, V_F, n_jobs=2)
        assert np.array_equal(seq_result, ray_seq_result)
        assert np.array_equal(seq_result, ray_par_result)
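
                        # GBM mimic baseline: a gradient boosting regressor is tuned by
                        # randomised search to reproduce the BNN's soft predictions, and
                        # its feature_importances_ serve as importance scores for the BNN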
                        gbm_mimic, gbm_mimic_time = train_mimic(
                            RandomizedSearchCV(
                                GradientBoostingRegressor(),
                                gbm_param_grid(p),
                                n_iter=n_search_iter,
                                cv=k,
                                n_jobs=n_jobs),
                            bnn, X_train, bnn_soft_predictions, X_test, n_mc_samples, True
                        )
                        variable_importances = add_importance_scores(
                            "gradient boosting machine mimic", gbm_mimic.best_estimator_.feature_importances_, dict_key, repeat_idx)
                        timings = add_timing("gradient boosting machine mimic", gbm_mimic_time, dict_key, repeat_idx)

                        # RATE, computed from the posterior mean and covariance of the
                        # BNN logits on the test set
                        M_F, V_F = bnn.logit_posterior(X_test)
                        rate_vals, rate_time = RATE2(X_test, M_F, V_F, return_time=True)
                        variable_importances = add_importance_scores("RATE", rate_vals, dict_key, repeat_idx)
                        timings = add_timing("RATE", rate_time, dict_key, repeat_idx)

                        # Saliency-style attribution maps: absolute per-example attributions
                        # are averaged over examples to give a global importance score
                        with DeepExplain(session=K.get_session()) as de:
                            input_tensor = nn.layers[0].input
                            # Target the penultimate layer's output so that attributions
                            # are taken with respect to pre-softmax scores
                            target_tensor = Model(inputs=input_tensor, outputs=nn.layers[-2].output)(input_tensor)

                            for attr_method in ["grad*input", "saliency", "intgrad", "elrp", "occlusion", "shapley_sampling"]:
                                # Only shapley_sampling takes an extra argument (number of samples)
                                arg_dict = {'samples': 10} if attr_method == "shapley_sampling" else {}
                                s_time = time.time()
                                imp_vals = de.explain(
                                    attr_method,
                                    target_tensor, input_tensor,