How to visualise XGBoost feature importance in Python

In [5]:
def Snippet_187(): 
    """Train an XGBoost classifier on the wine dataset and visualise
    feature importances three ways: a vertical bar chart, a horizontal
    bar chart, and xgboost's built-in plot_importance plot.

    Prints the fitted model, a classification report, and a confusion
    matrix; shows three matplotlib figures. Returns None.
    """
    print()
    # Banner centred to 82 chars with '*' padding (typo "Hoe" fixed to "How").
    print(format('How to visualise XGBoost feature importance in Python','*^82'))    
    
    import warnings
    warnings.filterwarnings("ignore")

    # load libraries (kept local to make the snippet self-contained)
    from sklearn import datasets
    from sklearn import metrics
    from xgboost import XGBClassifier, plot_importance
    from sklearn.model_selection import train_test_split
    import matplotlib.pyplot as plt    
    
    plt.style.use('ggplot')

    # load the wine dataset (13 features, 3 target classes)
    dataset = datasets.load_wine()
    X = dataset.data; y = dataset.target
    # NOTE: no random_state, so the split (and all printed metrics) vary run to run
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
    
    # fit an XGBClassifier model to the data
    model = XGBClassifier()
    model.fit(X_train, y_train)
    print(); print(model)
    
    # make predictions on the held-out 25%
    expected_y  = y_test
    predicted_y = model.predict(X_test)
    
    # summarize the fit of the model
    print(); print('XGBClassifier: ')
    print(); print(metrics.classification_report(expected_y, predicted_y, 
                   target_names=dataset.target_names))
    print(); print(metrics.confusion_matrix(expected_y, predicted_y))

    # feature importances as a vertical bar chart (one bar per feature)
    plt.figure(figsize=(12,12))
    plt.bar(range(len(model.feature_importances_)), model.feature_importances_)
    plt.show()

    # same values as a horizontal bar chart
    plt.figure(figsize=(12,12))
    plt.barh(range(len(model.feature_importances_)), model.feature_importances_)
    plt.show()    

    # xgboost's own importance plot (by default, F-score: split counts per feature)
    fig, ax = plt.subplots(figsize=(12,12))
    plot_importance(model, ax = ax)
    plt.show()
Snippet_187()
**************How to visualise XGBoost feature importance in Python***************

XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,
       colsample_bytree=1, gamma=0, learning_rate=0.1, max_delta_step=0,
       max_depth=3, min_child_weight=1, missing=None, n_estimators=100,
       n_jobs=1, nthread=None, objective='multi:softprob', random_state=0,
       reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=None,
       silent=True, subsample=1)

XGBClassifier: 

              precision    recall  f1-score   support

     class_0       0.92      1.00      0.96        11
     class_1       1.00      0.89      0.94        19
     class_2       0.94      1.00      0.97        15

   micro avg       0.96      0.96      0.96        45
   macro avg       0.95      0.96      0.96        45
weighted avg       0.96      0.96      0.96        45


[[11  0  0]
 [ 1 17  1]
 [ 0  0 15]]