xg_boost.py
# -*- coding: utf-8 -*-
"""A simple XGBoost model.

O'Reilly E-book page:
https://learning.oreilly.com/library/view/building-machine-learning/9781484244708/html/463852_1_En_23_Chapter.xhtml
"""
from math import sqrt

from sklearn import datasets
from sklearn.metrics import accuracy_score
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from xgboost import XGBClassifier
from xgboost import XGBRegressor

# Make the data split and the models deterministic
RANDOM_SEED = 42


class StochasticGradientBoostingDemo:
    """XGBoost, short for Extreme Gradient Boosting, makes a couple of computational
    and algorithmic modifications to the stochastic gradient boosting algorithm.
    """

    def make_prediction(self, data, dataset):
        # Separate features and target
        X = data[0]
        y = data[1]
        # Split into train and test sets (seeded so the split is reproducible)
        X_train, X_test, y_train, y_test = train_test_split(
            X, y, shuffle=True, random_state=RANDOM_SEED)
        # Create the model
        model = self.get_model(dataset)
        # Fit the model on the training set
        model.fit(X_train, y_train)
        # Make predictions on the test set
        predictions = model.predict(X_test)
        # Evaluate the model performance
        if dataset == 'flowers':
            accuracy = accuracy_score(y_test, predictions)
            print('Classifier accuracy {:.2f}'.format(accuracy))
        else:
            rmse = sqrt(mean_squared_error(y_test, predictions))
            print('Regression root mean squared error {:.2f}'.format(rmse))

    @staticmethod
    def get_model(dataset):
        """Create a classifier or a regression model that uses the XGBoost algorithm.

        Note that there are many hyperparameters you can pass to these models;
        refer to the online XGBoost docs for details (an illustrative instantiation
        is sketched in the comment below this class).
        """
        if dataset == 'flowers':
            return XGBClassifier(random_state=RANDOM_SEED)
        else:
            return XGBRegressor(random_state=RANDOM_SEED)
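

# A hedged, illustrative sketch (not part of the book's listing): XGBClassifier and
# XGBRegressor accept many tuning hyperparameters, for example:
#     XGBClassifier(random_state=RANDOM_SEED, n_estimators=200, learning_rate=0.1,
#                   max_depth=4, subsample=0.8, colsample_bytree=0.8)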
if __name__ == "__main__":
# Get some sample data from sklearn datasets. Setting return_X_y to True will
# constrain the output to be a tuple containing only the data and the targets.
flower_data = datasets.load_iris(return_X_y=True)
housing_data = datasets.load_boston(return_X_y=True)
# Predict with the two models and the two datasets.
predictor = StochasticGradientBoostingDemo()
predictor.make_prediction(flower_data, 'flowers')
predictor.make_prediction(housing_data, 'housing')