#importing dependencies
from sklearn import datasets
import xgboost as xgb
#load data
iris = datasets.load_iris()
X = iris.data #feature data
y = iris.target #target data
#create an 80/20 train-test split
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2)
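#the split is shuffled randomly each run; for a reproducible split you could
#pass a fixed seed, e.g.:
#X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)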
#transform to the XGBoost format (DMatrix)
D_train = xgb.DMatrix(X_train, label=y_train)
D_test = xgb.DMatrix(X_test, label=y_test)
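#DMatrix is XGBoost's optimized internal data structure; besides NumPy arrays
#it also accepts pandas DataFrames and scipy sparse matrices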
#define the XGBoost hyperparameters
param = {
    'eta': 0.3,
    'max_depth': 3,
    'objective': 'multi:softprob',
    'num_class': 3
}
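#'eta' is the learning rate (the shrinkage applied to each new tree), 'max_depth'
#caps the depth of each tree, and 'multi:softprob' makes predict() return an
#(n_samples, num_class) array of class probabilities; 'multi:softmax' would
#return the predicted labels directly instead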
steps = 20 #number of boosting rounds
#train the model
model = xgb.train(param, D_train, steps)
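#to watch performance on the held-out set during training, you could pass an
#evaluation list, e.g.:
#model = xgb.train(param, D_train, steps, evals=[(D_test, "test")])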
#model evaluation
import numpy as np
from sklearn.metrics import precision_score, recall_score, accuracy_score
preds = model.predict(D_test) #per-class probabilities (softprob output)
best_preds = np.argmax(preds, axis=1) #most probable class for each sample
print("Precision = {}".format(precision_score(y_test, best_preds, average='macro')))
print("Recall = {}".format(recall_score(y_test, best_preds, average='macro')))
print("Accuracy = {}".format(accuracy_score(y_test, best_preds)))