def Snippet_187():
    print()
    print(format('How to visualise XGBoost feature importance in Python', '*^82'))
    import warnings
    warnings.filterwarnings("ignore")
    # load libraries
    from sklearn import datasets
    from sklearn import metrics
    from xgboost import XGBClassifier, plot_importance
    from sklearn.model_selection import train_test_split
    import matplotlib.pyplot as plt
    plt.style.use('ggplot')
    # load the wine dataset
    dataset = datasets.load_wine()
    X = dataset.data; y = dataset.target
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
    # fit an XGBClassifier model to the training data
    model = XGBClassifier()
    model.fit(X_train, y_train)
    print(); print(model)
    # make predictions
    expected_y = y_test
    predicted_y = model.predict(X_test)
    # summarize the fit of the model
    print(); print('XGBClassifier: ')
    print(); print(metrics.classification_report(expected_y, predicted_y,
                                                 target_names=dataset.target_names))
    print(); print(metrics.confusion_matrix(expected_y, predicted_y))
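    # A hedged aside (not in the original snippet): the fitted booster can also
    # report importances as a dict via get_score(). With a plain NumPy array the
    # keys are generic names ('f0', 'f1', ...); importance_type may be
    # 'weight', 'gain', or 'cover'.
    print(); print(model.get_booster().get_score(importance_type='gain'))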
    # plot feature importances as a vertical bar chart (features indexed 0..n-1)
    plt.figure(figsize=(12,12))
    plt.bar(range(len(model.feature_importances_)), model.feature_importances_)
    plt.show()
    # the same importances as a horizontal bar chart
    plt.figure(figsize=(12,12))
    plt.barh(range(len(model.feature_importances_)), model.feature_importances_)
    plt.show()
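    # A hedged variant (an addition, not in the original snippet): the same
    # horizontal bar chart, but sorted and labelled with the dataset's
    # feature names so the bars are readable.
    importances = model.feature_importances_
    order = importances.argsort()
    plt.figure(figsize=(12,12))
    plt.barh([dataset.feature_names[i] for i in order], importances[order])
    plt.show()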
    # xgboost's built-in plot_importance helper
    fig, ax = plt.subplots(figsize=(12,12))
    plot_importance(model, ax=ax)
    plt.show()
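    # Optional sketch (not in the original snippet): plot_importance also
    # accepts importance_type ('weight', 'gain', or 'cover') and
    # max_num_features to limit the chart to the top-ranked features.
    fig, ax = plt.subplots(figsize=(12,12))
    plot_importance(model, ax=ax, importance_type='gain', max_num_features=10)
    plt.show()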
Snippet_187()