def test_parameter_passing(self):
    """Check that ``mc_iters`` and ``mc_agg`` are accepted by every estimator class
    and exposed unchanged as attributes after construction."""
    # DML variants that take an explicit final model
    for gen in [DML, NonParamDML]:
        est = gen(model_y=LinearRegression(), model_t=LinearRegression(),
                  model_final=LinearRegression(), mc_iters=2, mc_agg='median')
        assert est.mc_iters == 2
        assert est.mc_agg == 'median'

    # DML variants with a built-in final model
    for gen in [LinearDML, SparseLinearDML, KernelDML, ForestDML]:
        est = gen(model_y=LinearRegression(), model_t=LinearRegression(),
                  mc_iters=2, mc_agg='median')
        assert est.mc_iters == 2
        assert est.mc_agg == 'median'

    # DR learners use default nuisance models here
    for gen in [DRLearner, LinearDRLearner, SparseLinearDRLearner, ForestDRLearner]:
        est = gen(mc_iters=2, mc_agg='median')
        assert est.mc_iters == 2
        assert est.mc_agg == 'median'

    # IV estimators: this list holds already-constructed *instances*.
    # BUG FIX: the original bound each instance to `gen` but asserted on the
    # stale `est` from the previous loop, so none of these were actually
    # checked; bind the instance to `est` so the assertions test it.
    for est in [
        DMLATEIV(model_Y_W=LinearRegression(),
                 model_T_W=LinearRegression(),
                 model_Z_W=LinearRegression(),
                 mc_iters=2, mc_agg='median'),
        ProjectedDMLATEIV(model_Y_W=LinearRegression(),
                          model_T_W=LinearRegression(),
                          model_T_WZ=LinearRegression(),
                          mc_iters=2, mc_agg='median'),
        DMLIV(model_Y_X=LinearRegression(),
              model_T_X=LinearRegression(),
              model_T_XZ=LinearRegression(),
              model_final=LinearRegression(),
              mc_iters=2, mc_agg='median'),
        NonParamDMLIV(model_Y_X=LinearRegression(),
                      model_T_X=LinearRegression(),
                      model_T_XZ=LinearRegression(),
                      model_final=LinearRegression(),
                      mc_iters=2, mc_agg='median'),
        IntentToTreatDRIV(model_Y_X=LinearRegression(),
                          model_T_XZ=LinearRegression(),
                          flexible_model_effect=LinearRegression(),
                          mc_iters=2, mc_agg='median'),
        LinearIntentToTreatDRIV(model_Y_X=LinearRegression(),
                                model_T_XZ=LinearRegression(),
                                flexible_model_effect=LinearRegression(),
                                mc_iters=2, mc_agg='median')
    ]:
        assert est.mc_iters == 2
        assert est.mc_agg == 'median'
def test_multidim_arrays_fail(self):
    """Fitting with mismatched multi-class T/Z arrays must raise AttributeError."""
    outcome = np.array([2, 3, 1, 3, 2, 1, 1, 1])
    labels3 = np.array([1, 2, 3, 1, 2, 3, 1, 2])  # three distinct categories
    labels2 = np.array([1, 2, 1, 1, 2, 1, 1, 2])  # two distinct categories

    # three-category treatment with a two-category instrument is rejected
    est = NonParamDMLIV(model_Y_X=Lasso(),
                        model_T_X=LogisticRegression(),
                        model_T_XZ=LogisticRegression(),
                        model_final=WeightedLasso(),
                        discrete_treatment=True)
    with pytest.raises(AttributeError):
        est.fit(outcome, T=labels3, Z=labels2)

    # IntentToTreatDRIV rejects a three-category array on either side
    est = IntentToTreatDRIV(model_Y_X=Lasso(),
                            model_T_XZ=LogisticRegression(),
                            flexible_model_effect=WeightedLasso())
    with pytest.raises(AttributeError):
        est.fit(outcome, T=labels3, Z=labels2)
    with pytest.raises(AttributeError):
        est.fit(outcome, T=labels2, Z=labels3)
def test_orthoiv_random_state(self):
    """Reproducibility smoke test for the (new-API) IV estimators.

    Every randomized component — first-stage forests and the estimator itself —
    is given a fixed ``random_state``, then each estimator is run through the
    shared ``TestRandomState._test_random_state`` check.

    NOTE(review): the treatment array T is reused as the instrument (Z=T);
    presumably this just exercises the API rather than modeling a real
    instrument — confirm against ``_make_data`` if that matters.
    """
    Y, T, X, W, X_test = self._make_data(500, 2)
    for est in [OrthoIV(model_y_xw=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                        model_t_xw=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                        model_z_xw=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                        discrete_treatment=True, discrete_instrument=True,
                        cv=2, random_state=123),
                NonParamDMLIV(
                    model_y_xw=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                    model_t_xw=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    model_t_xwz=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    model_final=LinearRegression(),
                    discrete_treatment=True, discrete_instrument=True,
                    cv=2, random_state=123),
                # LinearDRIV additionally needs a model for T*Z and a flexible effect model
                LinearDRIV(model_y_xw=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                           model_t_xw=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                           model_z_xw=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                           model_tz_xw=RandomForestClassifier(
                               n_estimators=10, max_depth=4, random_state=123),
                           flexible_model_effect=StatsModelsLinearRegression(
                               fit_intercept=False),
                           discrete_treatment=True, discrete_instrument=True,
                           cv=2, random_state=123),
                IntentToTreatDRIV(
                    model_y_xw=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                    model_t_xwz=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    flexible_model_effect=RandomForestRegressor(
                        n_estimators=10, max_depth=4, random_state=123),
                    cv=2, random_state=123),
                LinearIntentToTreatDRIV(
                    model_y_xw=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                    model_t_xwz=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    flexible_model_effect=RandomForestRegressor(
                        n_estimators=10, max_depth=4, random_state=123),
                    cv=2, random_state=123)]:
        TestRandomState._test_random_state(est, X_test, Y, T, X=X, W=W, Z=T)
def test_orthoiv_random_state(self):
    """Reproducibility smoke test for the legacy (old-API) IV estimators.

    Estimators are grouped by which data they accept: W-only estimators get
    no X/X_test, the DMLIV family gets X but no W, and the intent-to-treat
    estimators get both. All randomized components use fixed random states.

    NOTE(review): the treatment T is reused as the instrument (Z=T) throughout.
    """
    Y, T, X, W, X_test = self._make_data(500, 2)
    # estimators that only use W (no heterogeneity features X)
    for est in [DMLATEIV(model_Y_W=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                         model_T_W=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                         model_Z_W=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                         discrete_treatment=True, discrete_instrument=True,
                         cv=2, random_state=123),
                ProjectedDMLATEIV(
                    model_Y_W=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                    model_T_W=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    model_T_WZ=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    discrete_treatment=True, discrete_instrument=True,
                    cv=2, random_state=123)]:
        TestRandomState._test_random_state(est, None, Y, T, W=W, Z=T)
    # estimators that use X (passed X_test for effect evaluation) but no W
    for est in [DMLIV(model_Y_X=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                      model_T_X=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                      model_T_XZ=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                      model_final=LinearRegression(fit_intercept=False),
                      discrete_treatment=True, discrete_instrument=True,
                      cv=2, random_state=123),
                NonParamDMLIV(
                    model_Y_X=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                    model_T_X=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    model_T_XZ=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    model_final=LinearRegression(),
                    discrete_treatment=True, discrete_instrument=True,
                    cv=2, random_state=123)]:
        TestRandomState._test_random_state(est, X_test, Y, T, X=X, Z=T)
    # intent-to-treat estimators take both X and W
    for est in [IntentToTreatDRIV(
                    model_Y_X=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                    model_T_XZ=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    flexible_model_effect=RandomForestRegressor(
                        n_estimators=10, max_depth=4, random_state=123),
                    cv=2, random_state=123),
                LinearIntentToTreatDRIV(
                    model_Y_X=RandomForestRegressor(n_estimators=10, max_depth=4, random_state=123),
                    model_T_XZ=RandomForestClassifier(n_estimators=10, max_depth=4, random_state=123),
                    flexible_model_effect=RandomForestRegressor(
                        n_estimators=10, max_depth=4, random_state=123),
                    cv=2, random_state=123)]:
        TestRandomState._test_random_state(est, X_test, Y, T, X=X, W=W, Z=T)
def test_cate_api(self):
    """Exercise the full CATE estimator API of the DRIV family across many
    input configurations (with/without W and X, binary vs. continuous T and Z,
    with/without projection and featurization), asserting output shapes for
    effects, intervals, scoring, feature names, and shap values."""

    def const_marg_eff_shape(n, d_x, binary_T):
        # one row per sample if X is provided, otherwise a single constant effect;
        # binary treatment adds a trailing treatment dimension
        return (n if d_x else 1, ) + ((1, ) if binary_T else ())

    def marg_eff_shape(n, binary_T):
        # marginal effect is always per-sample
        return (n, ) + ((1, ) if binary_T else ())

    def eff_shape(n, d_x):
        return (n if d_x else 1, )

    n = 1000
    y = np.random.normal(size=(n, ))
    for d_w in [None, 10]:
        if d_w is None:
            W = None
        else:
            W = np.random.normal(size=(n, d_w))
        for d_x in [None, 3]:
            if d_x is None:
                X = None
            else:
                X = np.random.normal(size=(n, d_x))
            for binary_T in [True, False]:
                if binary_T:
                    # string categories exercise label encoding of treatments
                    T = np.random.choice(["a", "b"], size=(n, ))
                else:
                    T = np.random.normal(size=(n, ))
                for binary_Z in [True, False]:
                    if binary_Z:
                        Z = np.random.choice(["c", "d"], size=(n, ))
                    else:
                        Z = np.random.normal(size=(n, ))
                    for projection in [True, False]:
                        for featurizer in [
                                None,
                                PolynomialFeatures(degree=2, include_bias=False),
                        ]:
                            est_list = [
                                DRIV(
                                    flexible_model_effect=StatsModelsLinearRegression(
                                        fit_intercept=False),
                                    model_final=StatsModelsLinearRegression(
                                        fit_intercept=False),
                                    fit_cate_intercept=True,
                                    projection=projection,
                                    discrete_instrument=binary_Z,
                                    discrete_treatment=binary_T,
                                    featurizer=featurizer,
                                ),
                                LinearDRIV(
                                    flexible_model_effect=StatsModelsLinearRegression(
                                        fit_intercept=False),
                                    fit_cate_intercept=True,
                                    projection=projection,
                                    discrete_instrument=binary_Z,
                                    discrete_treatment=binary_T,
                                    featurizer=featurizer,
                                ),
                                SparseLinearDRIV(
                                    flexible_model_effect=StatsModelsLinearRegression(
                                        fit_intercept=False),
                                    fit_cate_intercept=True,
                                    projection=projection,
                                    discrete_instrument=binary_Z,
                                    discrete_treatment=binary_T,
                                    featurizer=featurizer,
                                ),
                                ForestDRIV(
                                    flexible_model_effect=StatsModelsLinearRegression(
                                        fit_intercept=False),
                                    projection=projection,
                                    discrete_instrument=binary_Z,
                                    discrete_treatment=binary_T,
                                    featurizer=featurizer,
                                ),
                            ]
                            if X is None:
                                # drop ForestDRIV (last in list) when there is no X
                                est_list = est_list[:-1]
                            if binary_T and binary_Z:
                                # intent-to-treat estimators only apply with binary T and Z
                                est_list += [
                                    IntentToTreatDRIV(
                                        flexible_model_effect=StatsModelsLinearRegression(
                                            fit_intercept=False),
                                        fit_cate_intercept=True,
                                        featurizer=featurizer,
                                    ),
                                    LinearIntentToTreatDRIV(
                                        flexible_model_effect=StatsModelsLinearRegression(
                                            fit_intercept=False),
                                        featurizer=featurizer,
                                    ),
                                ]
                            for est in est_list:
                                with self.subTest(d_w=d_w, d_x=d_x, binary_T=binary_T,
                                                  binary_Z=binary_Z, projection=projection,
                                                  featurizer=featurizer, est=est):
                                    # ensure we can serialize unfit estimator
                                    pickle.dumps(est)
                                    est.fit(y, T, Z=Z, X=X, W=W)
                                    # ensure we can serialize fit estimator
                                    pickle.dumps(est)
                                    # expected effect size
                                    const_marginal_effect_shape = const_marg_eff_shape(
                                        n, d_x, binary_T)
                                    marginal_effect_shape = marg_eff_shape(n, binary_T)
                                    effect_shape = eff_shape(n, d_x)
                                    # test effect
                                    const_marg_eff = est.const_marginal_effect(X)
                                    self.assertEqual(
                                        shape(const_marg_eff), const_marginal_effect_shape)
                                    marg_eff = est.marginal_effect(T, X)
                                    self.assertEqual(
                                        shape(marg_eff), marginal_effect_shape)
                                    # baseline/target treatment values match T's encoding
                                    T0 = "a" if binary_T else 0
                                    T1 = "b" if binary_T else 1
                                    eff = est.effect(X, T0=T0, T1=T1)
                                    self.assertEqual(shape(eff), effect_shape)
                                    # test inference: intervals prepend a (lower, upper) axis
                                    const_marg_eff_int = est.const_marginal_effect_interval(X)
                                    marg_eff_int = est.marginal_effect_interval(T, X)
                                    eff_int = est.effect_interval(X, T0=T0, T1=T1)
                                    self.assertEqual(
                                        shape(const_marg_eff_int),
                                        (2, ) + const_marginal_effect_shape)
                                    self.assertEqual(
                                        shape(marg_eff_int),
                                        (2, ) + marginal_effect_shape)
                                    self.assertEqual(
                                        shape(eff_int), (2, ) + effect_shape)
                                    # test can run score
                                    est.score(y, T, Z=Z, X=X, W=W)
                                    if X is not None:
                                        # test cate_feature_names
                                        expect_feat_len = featurizer.fit(
                                            X).n_output_features_ if featurizer else d_x
                                        self.assertEqual(
                                            len(est.cate_feature_names()), expect_feat_len)
                                        # test can run shap values
                                        shap_values = est.shap_values(X[:10])
def test_cate_api(self):
    """Exercise the full CATE API of the DRIV estimator family over the cartesian
    product of input configurations (W/X presence, binary vs. continuous T and Z,
    projection, featurization), asserting the shapes of effects, intervals,
    feature names, and shap values."""

    def const_marg_eff_shape(n, d_x, binary_T):
        """Constant marginal effect shape."""
        return (n if d_x else 1,) + ((1,) if binary_T else ())

    def marg_eff_shape(n, binary_T):
        """Marginal effect shape."""
        return (n,) + ((1,) if binary_T else ())

    def eff_shape(n, d_x):
        "Effect shape."
        return (n if d_x else 1,)

    n = 500
    y = np.random.normal(size=(n,))

    # parameter combinations to test
    for d_w, d_x, binary_T, binary_Z, projection, featurizer\
            in itertools.product(
                [None, 10],  # d_w
                [None, 3],  # d_x
                [True, False],  # binary_T
                [True, False],  # binary_Z
                [True, False],  # projection
                [None, PolynomialFeatures(degree=2, include_bias=False), ]):  # featurizer

        if d_w is None:
            W = None
        else:
            W = np.random.normal(size=(n, d_w))

        if d_x is None:
            X = None
        else:
            X = np.random.normal(size=(n, d_x))

        if binary_T:
            # string categories exercise treatment label encoding
            T = np.random.choice(["a", "b"], size=(n,))
        else:
            T = np.random.normal(size=(n,))

        if binary_Z:
            Z = np.random.choice(["c", "d"], size=(n,))
        else:
            Z = np.random.normal(size=(n,))

        est_list = [
            DRIV(
                flexible_model_effect=StatsModelsLinearRegression(fit_intercept=False),
                model_final=StatsModelsLinearRegression(
                    fit_intercept=False
                ),
                fit_cate_intercept=True,
                projection=projection,
                discrete_instrument=binary_Z,
                discrete_treatment=binary_T,
                featurizer=featurizer,
            ),
            LinearDRIV(
                flexible_model_effect=StatsModelsLinearRegression(fit_intercept=False),
                fit_cate_intercept=True,
                projection=projection,
                discrete_instrument=binary_Z,
                discrete_treatment=binary_T,
                featurizer=featurizer,
            ),
            SparseLinearDRIV(
                flexible_model_effect=StatsModelsLinearRegression(fit_intercept=False),
                fit_cate_intercept=True,
                projection=projection,
                discrete_instrument=binary_Z,
                discrete_treatment=binary_T,
                featurizer=featurizer,
            ),
            ForestDRIV(
                flexible_model_effect=StatsModelsLinearRegression(fit_intercept=False),
                projection=projection,
                discrete_instrument=binary_Z,
                discrete_treatment=binary_T,
                featurizer=featurizer,
            ),
        ]

        if X is None:
            # drop ForestDRIV (last entry) when no heterogeneity features exist
            est_list = est_list[:-1]

        if binary_T and binary_Z:
            # intent-to-treat estimators only apply with binary T and Z
            est_list += [
                IntentToTreatDRIV(
                    flexible_model_effect=StatsModelsLinearRegression(
                        fit_intercept=False
                    ),
                    fit_cate_intercept=True,
                    featurizer=featurizer,
                ),
                LinearIntentToTreatDRIV(
                    flexible_model_effect=StatsModelsLinearRegression(
                        fit_intercept=False
                    ),
                    featurizer=featurizer,
                ),
            ]

        for est in est_list:
            with self.subTest(d_w=d_w, d_x=d_x, binary_T=binary_T,
                              binary_Z=binary_Z, projection=projection,
                              featurizer=featurizer, est=est):
                # TODO: serializing/deserializing for every combination -- is this necessary?
                # ensure we can serialize unfit estimator
                pickle.dumps(est)
                est.fit(y, T, Z=Z, X=X, W=W)
                # ensure we can serialize fit estimator
                pickle.dumps(est)

                # expected effect size
                exp_const_marginal_effect_shape = const_marg_eff_shape(n, d_x, binary_T)
                marginal_effect_shape = marg_eff_shape(n, binary_T)
                effect_shape = eff_shape(n, d_x)

                # assert calculated constant marginal effect shape is expected
                # const_marginal effect is defined in LinearCateEstimator class
                const_marg_eff = est.const_marginal_effect(X)
                self.assertEqual(shape(const_marg_eff), exp_const_marginal_effect_shape)

                # assert calculated marginal effect shape is expected
                marg_eff = est.marginal_effect(T, X)
                self.assertEqual(shape(marg_eff), marginal_effect_shape)

                # baseline/target treatment values match T's encoding
                T0 = "a" if binary_T else 0
                T1 = "b" if binary_T else 1
                eff = est.effect(X, T0=T0, T1=T1)
                self.assertEqual(shape(eff), effect_shape)

                # test inference: intervals prepend a (lower, upper) axis
                const_marg_eff_int = est.const_marginal_effect_interval(X)
                marg_eff_int = est.marginal_effect_interval(T, X)
                eff_int = est.effect_interval(X, T0=T0, T1=T1)
                self.assertEqual(shape(const_marg_eff_int),
                                 (2,) + exp_const_marginal_effect_shape)
                self.assertEqual(shape(marg_eff_int), (2,) + marginal_effect_shape)
                self.assertEqual(shape(eff_int), (2,) + effect_shape)

                # test can run score
                est.score(y, T, Z=Z, X=X, W=W)

                if X is not None:
                    # test cate_feature_names
                    expect_feat_len = featurizer.fit(
                        X).n_output_features_ if featurizer else d_x
                    self.assertEqual(len(est.cate_feature_names()), expect_feat_len)

                    # test can run shap values
                    _ = est.shap_values(X[:10])
def test_orthoiv(self):
    """Check that (new-API) IV estimators honor post-construction changes to their
    nuisance/final models via ``cache_values`` + ``refit_final`` and re-fitting:
    refitting the final stage alone must not re-run nuisances, while a full re-fit
    must pick up newly assigned nuisance models.

    NOTE(review): T doubles as its own instrument here (Z = T.copy()) — this only
    exercises the API plumbing, not instrument validity.
    """
    y, T, X, W = self._get_data()
    Z = T.copy()
    est = OrthoIV(model_y_xw=LinearRegression(),
                  model_t_xw=LinearRegression(),
                  model_z_xw=LinearRegression(),
                  mc_iters=2)
    est.fit(y, T, Z=Z, W=W, cache_values=True)
    est.refit_final()
    # swap nuisance models, then re-fit: the fitted nuisances must reflect the swap
    est.model_y_xw = Lasso()
    est.model_t_xw = ElasticNet()
    est.model_z_xw = WeightedLasso()
    est.fit(y, T, Z=Z, W=W, cache_values=True)
    assert isinstance(est.models_nuisance_[0][0]._model_y_xw._model, Lasso)
    assert isinstance(est.models_nuisance_[0][0]._model_t_xw._model, ElasticNet)
    assert isinstance(est.models_nuisance_[0][0]._model_z_xw._model, WeightedLasso)

    est = DMLIV(model_y_xw=LinearRegression(),
                model_t_xw=LinearRegression(),
                model_t_xwz=LinearRegression(),
                model_final=LinearRegression(fit_intercept=False),
                mc_iters=2)
    est.fit(y, T, Z=Z, X=X, W=W, cache_values=True)
    # same nuisance-swap check for DMLIV
    est.model_y_xw = Lasso()
    est.model_t_xw = ElasticNet()
    est.model_t_xwz = WeightedLasso()
    est.fit(y, T, Z=Z, X=X, W=W, cache_values=True)
    assert isinstance(est.models_nuisance_[0][0]._model_y_xw._model, Lasso)
    assert isinstance(est.models_nuisance_[0][0]._model_t_xw._model, ElasticNet)
    assert isinstance(est.models_nuisance_[0][0]._model_t_xwz._model, WeightedLasso)

    est = NonParamDMLIV(model_y_xw=LinearRegression(),
                        model_t_xw=LinearRegression(),
                        model_t_xwz=LinearRegression(),
                        model_final=LinearRegression(fit_intercept=True),
                        mc_iters=2)
    est.fit(y, T, Z=Z, X=X, W=W, cache_values=True)
    # changing featurizer/final model then refit_final should only redo the final stage
    est.featurizer = PolynomialFeatures(degree=2, include_bias=False)
    est.model_final = WeightedLasso()
    est.refit_final()
    assert isinstance(est.model_cate, WeightedLasso)
    assert isinstance(est.featurizer_, PolynomialFeatures)

    est = IntentToTreatDRIV(model_y_xw=LinearRegression(),
                            model_t_xwz=LogisticRegression(),
                            flexible_model_effect=LinearRegression())
    est.fit(y, T, Z=Z, X=X, W=W, cache_values=True)
    # with no explicit model_final, the flexible effect model is used as the final model
    assert est.model_final is None
    assert isinstance(est.model_final_, LinearRegression)
    est.flexible_model_effect = Lasso()
    est.refit_final()
    assert est.model_final is None
    assert isinstance(est.model_final_, Lasso)
    # an explicit model_final takes precedence over the flexible effect model
    est.model_final = Lasso()
    est.refit_final()
    assert isinstance(est.model_final, Lasso)
    assert isinstance(est.model_final_, Lasso)
    # refit_final alone does not retrain the preliminary effect nuisance...
    assert isinstance(
        est.models_nuisance_[0][0]._prel_model_effect.model_final_,
        LinearRegression)
    # ...but a full re-fit does
    est.fit(y, T, Z=Z, X=X, W=W, cache_values=True)
    assert isinstance(
        est.models_nuisance_[0][0]._prel_model_effect.model_final_,
        Lasso)

    est = LinearIntentToTreatDRIV(model_y_xw=LinearRegression(),
                                  model_t_xwz=LogisticRegression(),
                                  flexible_model_effect=LinearRegression())
    est.fit(y, T, Z=Z, X=X, W=W, cache_values=True)
    # while fitted with an intercept, these accesses should not raise
    est.fit_cate_intercept = False
    est.intercept_
    est.intercept__interval()
    # after refitting without an intercept, the intercept is no longer available
    est.refit_final()
    with pytest.raises(AttributeError):
        est.intercept_
    with pytest.raises(AttributeError):
        est.intercept__interval()
    # the linear variant does not allow overriding the final model
    with pytest.raises(ValueError):
        est.model_final = LinearRegression()
    est.flexible_model_effect = Lasso()
    est.fit(y, T, Z=Z, X=X, W=W, cache_values=True)
    assert isinstance(
        est.models_nuisance_[0][0]._prel_model_effect.model_final_,
        Lasso)
def test_orthoiv(self):
    """Check that the legacy (old-API) IV estimators honor post-construction
    changes to their nuisance/final models via ``cache_values`` + ``refit_final``
    and re-fitting, and that final-stage attributes (coef_, intercept_) track
    featurizer/intercept settings.

    NOTE(review): T doubles as its own instrument (Z = T.copy()) — API plumbing
    only, not a statistically valid instrument.
    """
    y, T, X, W = self._get_data()
    Z = T.copy()
    est = DMLATEIV(model_Y_W=LinearRegression(),
                   model_T_W=LinearRegression(),
                   model_Z_W=LinearRegression(),
                   mc_iters=2)
    est.fit(y, T, W=W, Z=Z, cache_values=True)
    est.refit_final()
    # swap nuisance models, then re-fit: fitted nuisances must reflect the swap
    est.model_Y_W = Lasso()
    est.model_T_W = ElasticNet()
    est.model_Z_W = WeightedLasso()
    est.fit(y, T, W=W, Z=Z, cache_values=True)
    assert isinstance(est.models_nuisance_[0][0]._model_Y_W._model, Lasso)
    assert isinstance(est.models_nuisance_[0][0]._model_T_W._model, ElasticNet)
    assert isinstance(est.models_nuisance_[0][0]._model_Z_W._model, WeightedLasso)

    est = ProjectedDMLATEIV(model_Y_W=LinearRegression(),
                            model_T_W=LinearRegression(),
                            model_T_WZ=LinearRegression(),
                            mc_iters=2)
    est.fit(y, T, W=W, Z=Z, cache_values=True)
    est.refit_final()
    est.model_Y_W = Lasso()
    est.model_T_W = ElasticNet()
    est.model_T_WZ = WeightedLasso()
    est.fit(y, T, W=W, Z=Z, cache_values=True)
    assert isinstance(est.models_nuisance_[0][0]._model_Y_W._model, Lasso)
    assert isinstance(est.models_nuisance_[0][0]._model_T_W._model, ElasticNet)
    assert isinstance(est.models_nuisance_[0][0]._model_T_WZ._model, WeightedLasso)

    est = DMLIV(model_Y_X=LinearRegression(),
                model_T_X=LinearRegression(),
                model_T_XZ=LinearRegression(),
                model_final=LinearRegression(fit_intercept=False),
                mc_iters=2)
    est.fit(y, T, X=X, Z=Z, cache_values=True)
    # coefficient length tracks the featurized X dimension
    np.testing.assert_equal(len(est.coef_), X.shape[1])
    est.featurizer = PolynomialFeatures(degree=2, include_bias=False)
    est.refit_final()
    np.testing.assert_equal(len(est.coef_), X.shape[1]**2)
    # intercept accessible while fit_cate_intercept was True at fit time...
    est.intercept_
    est.fit_cate_intercept = False
    est.intercept_
    # ...but gone after refitting without it
    est.refit_final()
    with pytest.raises(AttributeError):
        est.intercept_
    est.model_Y_X = Lasso()
    est.model_T_X = ElasticNet()
    est.model_T_XZ = WeightedLasso()
    est.fit(y, T, X=X, Z=Z, cache_values=True)
    # fitted nuisances exposed through the convenience accessors
    assert isinstance(est.models_Y_X[0][0], Lasso)
    assert isinstance(est.models_T_X[0][0], ElasticNet)
    assert isinstance(est.models_T_XZ[0][0], WeightedLasso)

    # second DMLIV pass: same scenario, but inspecting the raw nuisance wrappers
    est = DMLIV(model_Y_X=LinearRegression(),
                model_T_X=LinearRegression(),
                model_T_XZ=LinearRegression(),
                model_final=LinearRegression(fit_intercept=False),
                mc_iters=2)
    est.fit(y, T, X=X, Z=Z, cache_values=True)
    np.testing.assert_equal(len(est.coef_), X.shape[1])
    est.featurizer = PolynomialFeatures(degree=2, include_bias=False)
    est.refit_final()
    np.testing.assert_equal(len(est.coef_), X.shape[1]**2)
    est.intercept_
    est.fit_cate_intercept = False
    est.intercept_
    est.refit_final()
    with pytest.raises(AttributeError):
        est.intercept_
    est.model_Y_X = Lasso()
    est.model_T_X = ElasticNet()
    est.model_T_XZ = WeightedLasso()
    est.fit(y, T, X=X, Z=Z, cache_values=True)
    assert isinstance(est.models_nuisance_[0][0]._model_Y_X._model, Lasso)
    assert isinstance(est.models_nuisance_[0][0]._model_T_X._model, ElasticNet)
    assert isinstance(est.models_nuisance_[0][0]._model_T_XZ._model, WeightedLasso)

    est = NonParamDMLIV(model_Y_X=LinearRegression(),
                        model_T_X=LinearRegression(),
                        model_T_XZ=LinearRegression(),
                        model_final=LinearRegression(fit_intercept=True),
                        mc_iters=2)
    est.fit(y, T, X=X, Z=Z, cache_values=True)
    # changing featurizer/final model then refit_final only redoes the final stage
    est.featurizer = PolynomialFeatures(degree=2, include_bias=False)
    est.model_final = WeightedLasso()
    est.refit_final()
    assert isinstance(est.model_cate, WeightedLasso)
    assert isinstance(est.featurizer_, PolynomialFeatures)

    est = IntentToTreatDRIV(model_Y_X=LinearRegression(),
                            model_T_XZ=LogisticRegression(),
                            flexible_model_effect=LinearRegression())
    est.fit(y, T, X=X, W=W, Z=Z, cache_values=True)
    # with no explicit model_final, the flexible effect model serves as the final model
    assert est.model_final is None
    assert isinstance(est.model_final_, LinearRegression)
    est.flexible_model_effect = Lasso()
    est.refit_final()
    assert est.model_final is None
    assert isinstance(est.model_final_, Lasso)
    # an explicit model_final takes precedence
    est.model_final = Lasso()
    est.refit_final()
    assert isinstance(est.model_final, Lasso)
    assert isinstance(est.model_final_, Lasso)
    # refit_final alone does not retrain the preliminary effect nuisance...
    assert isinstance(
        est.models_nuisance_[0][0]._prel_model_effect.model_final_,
        LinearRegression)
    # ...but a full re-fit does
    est.fit(y, T, X=X, W=W, Z=Z, cache_values=True)
    assert isinstance(
        est.models_nuisance_[0][0]._prel_model_effect.model_final_,
        Lasso)

    est = LinearIntentToTreatDRIV(model_Y_X=LinearRegression(),
                                  model_T_XZ=LogisticRegression(),
                                  flexible_model_effect=LinearRegression())
    est.fit(y, T, X=X, W=W, Z=Z, cache_values=True)
    # while fitted with an intercept, these accesses should not raise
    est.fit_cate_intercept = False
    est.intercept_
    est.intercept__interval()
    # after refitting without an intercept, the intercept is no longer available
    est.refit_final()
    with pytest.raises(AttributeError):
        est.intercept_
    with pytest.raises(AttributeError):
        est.intercept__interval()
    # the linear variant does not allow overriding the final model
    with pytest.raises(ValueError):
        est.model_final = LinearRegression()
    est.flexible_model_effect = Lasso()
    est.fit(y, T, X=X, W=W, Z=Z, cache_values=True)
    assert isinstance(
        est.models_nuisance_[0][0]._prel_model_effect.model_final_,
        Lasso)