# Fit an ExtraTreesClassifier on the iris dataset and report which input
# features the ensemble considers most informative.
from sklearn.datasets import load_iris
from sklearn.ensemble import ExtraTreesClassifier

# Load the iris dataset.
iris = load_iris()

# Create an instance of the ExtraTreesClassifier.
# random_state=0 makes the (otherwise randomized) tree building reproducible.
clf = ExtraTreesClassifier(n_estimators=100, random_state=0)

# Fit the classifier to the iris dataset.
clf.fit(iris.data, iris.target)

# Print the feature importances (one score per input feature).
print(clf.feature_importances_)
# Train an ExtraTreesClassifier on a synthetic 2-D dataset and visualize
# the decision regions it learns, using mlxtend's plotting helper.
from sklearn.datasets import make_classification
from sklearn.ensemble import ExtraTreesClassifier
import matplotlib.pyplot as plt
from mlxtend.plotting import plot_decision_regions

# Generate a synthetic dataset with exactly 2 (informative) features so the
# decision surface can be drawn in the plane. random_state=42 fixes the data.
X, y = make_classification(
    n_samples=100,
    n_features=2,
    n_redundant=0,
    n_informative=2,
    random_state=42,
)

# Create an instance of the ExtraTreesClassifier.
clf = ExtraTreesClassifier(n_estimators=100, random_state=0)

# Fit the classifier to the synthetic dataset.
clf.fit(X, y)

# Visualize the decision surface of the classifier.
plot_decision_regions(X, y, clf=clf, legend=2)
plt.show()

# NOTE: The sklearn.ensemble package provides many ensemble algorithms for
# classification and regression tasks. The ExtraTreesClassifier algorithm is
# one of them and is particularly useful for datasets with high
# dimensionality and noisy data.