-
Notifications
You must be signed in to change notification settings - Fork 6.4k
/
Copy pathbagging_classification.py
91 lines (69 loc) · 2.13 KB
/
bagging_classification.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
# https://deeplearningcourses.com/c/machine-learning-in-python-random-forest-adaboost
# https://www.udemy.com/machine-learning-in-python-random-forest-adaboost
from __future__ import print_function, division
from builtins import range, input
# Note: you may need to update your version of future
# sudo pip install -U future
import numpy as np
import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeClassifier
from sklearn.utils import shuffle
from util import plot_decision_boundary
# fix the RNG so the dataset (and tree fits) are reproducible
np.random.seed(10)

# build a noisy XOR dataset: four Gaussian clusters, one per quadrant,
# labels alternating 0/1 by quadrant so no axis-aligned split separates them
N = 500
D = 2
X = np.random.randn(N, D)

sep = 2
quadrant_shifts = [(sep, sep), (sep, -sep), (-sep, -sep), (-sep, sep)]
for q, shift in enumerate(quadrant_shifts):
    X[q*125:(q + 1)*125] += np.array(shift)
Y = np.array(([0]*125 + [1]*125) * 2)

# visualize the raw data
plt.scatter(X[:,0], X[:,1], s=100, c=Y, alpha=0.5)
plt.show()

# baseline: a single, unconstrained decision tree
model = DecisionTreeClassifier()
model.fit(X, Y)
print("score for 1 tree:", model.score(X, Y))

# show the single tree's decision boundary over the data
plt.scatter(X[:,0], X[:,1], s=100, c=Y, alpha=0.5)
plot_decision_boundary(X, model)
plt.show()
# create the bagged model
class BaggedTreeClassifier:
    """Bootstrap-aggregated (bagged) ensemble of shallow decision trees.

    Trains B trees, each on a bootstrap resample (sampling with replacement)
    of the training data, and predicts by majority vote: the rounded mean of
    the individual trees' 0/1 predictions. Binary labels {0, 1} only.
    """

    def __init__(self, B, max_depth=2):
        # B: number of bootstrap rounds (trees in the ensemble).
        # max_depth: depth limit for each base tree; default 2 preserves the
        # original hard-coded behavior while letting callers tune the base
        # learner's capacity.
        self.B = B
        self.max_depth = max_depth

    def fit(self, X, Y):
        """Fit B trees, each on a bootstrap resample of (X, Y).

        Returns self (sklearn-style chaining); callers that ignore the
        return value are unaffected.
        """
        N = len(X)
        self.models = []
        for b in range(self.B):
            # N draws with replacement -> one bootstrap sample
            idx = np.random.choice(N, size=N, replace=True)
            model = DecisionTreeClassifier(max_depth=self.max_depth)
            model.fit(X[idx], Y[idx])
            self.models.append(model)
        return self

    def predict(self, X):
        """Majority-vote prediction over the fitted trees (0.0 or 1.0 per row)."""
        # With binary labels, summing the 0/1 votes and rounding the mean is
        # a majority vote — no per-class vote dictionary needed.
        votes = np.zeros(len(X))
        for model in self.models:
            votes += model.predict(X)
        return np.round(votes / self.B)

    def score(self, X, Y):
        """Mean accuracy of predict(X) against the true labels Y."""
        P = self.predict(X)
        return np.mean(P == Y)
# fit a bagged ensemble of 200 depth-limited trees and evaluate it
ensemble = BaggedTreeClassifier(200)
ensemble.fit(X, Y)
print("score for bagged model:", ensemble.score(X, Y))

# show the (smoother) bagged decision boundary over the data
plt.scatter(X[:,0], X[:,1], s=100, c=Y, alpha=0.5)
plot_decision_boundary(X, ensemble)
plt.show()