|
@@ -0,0 +1,55 @@
|
|
1
|
+#-*- coding:utf-8 -*-
|
|
2
|
+import numpy as np
|
|
3
|
+from sklearn.ensemble import GradientBoostingClassifier,GradientBoostingRegressor
|
|
4
|
+from sklearn.model_selection import train_test_split
|
|
5
|
+from sklearn.datasets import load_wine,load_boston
|
|
6
|
+from sklearn import tree
|
|
7
|
+
|
|
8
|
+
|
|
9
|
def read_data(test_size=0.3, random_state=None):
    """Load the Boston housing dataset and split it into train/test parts.

    NOTE(review): ``load_boston`` was deprecated in scikit-learn 1.0 and
    removed in 1.2 for ethical reasons -- this only runs on older versions;
    swap in another regression dataset when upgrading.

    Parameters
    ----------
    test_size : float, default 0.3
        Fraction of samples held out for the test split (matches the
        original hard-coded 0.3).
    random_state : int or None, default None
        Seed for the split. ``None`` keeps the original non-reproducible
        behavior; pass an int for repeatable experiments.

    Returns
    -------
    tuple
        (Xtrain, Xtest, Ytrain, Ytest) numpy arrays.
    """
    boston = load_boston()
    Xtrain, Xtest, Ytrain, Ytest = train_test_split(
        boston.data, boston.target,
        test_size=test_size, random_state=random_state)
    return Xtrain, Xtest, Ytrain, Ytest
|
|
13
|
+
|
|
14
|
def init(Ytrain):
    """Initial constant model F0(x): the arithmetic mean of the targets.

    With squared loss, the constant that minimizes training error is the
    target mean, so boosting starts from it.
    """
    return np.asarray(Ytrain).mean()
|
|
16
|
+
|
|
17
|
+
|
|
18
|
def fit(Xtrain, Ytrain, n_rounds=20):
    """Hand-rolled gradient boosting for regression with squared loss.

    Starts from the constant mean prediction (``init``), then for
    ``n_rounds`` iterations fits a shallow regression tree to the current
    residuals and appends its predictions to the ensemble.  Prints the
    per-round residuals/predictions and the final training sum of squared
    errors; returns nothing (diagnostic script).

    Parameters
    ----------
    Xtrain : np.ndarray, shape (n_samples, n_features)
        Training features.
    Ytrain : np.ndarray, shape (n_samples,)
        Training targets.
    n_rounds : int, default 20
        Number of boosting rounds (was hard-coded to 20).
    """
    print("init", Ytrain[:10])
    fx = []  # per-round prediction vectors; fx[0] is the constant baseline

    fx0 = np.ones(Ytrain.shape[0]) * init(Ytrain)
    fx.append(fx0)
    print("0", fx0[:10])

    gx = Ytrain
    for i in range(n_rounds):
        # Residual of the current ensemble: subtract the latest round's
        # predictions from the previous residual (the negative gradient of
        # squared loss).
        gx = gx - fx0
        print("第", i, '轮 残差', gx[:10])
        # NOTE(review): criterion="mse" was renamed to "squared_error" in
        # scikit-learn 1.0 and removed in 1.2 -- update when upgrading.
        clf = tree.DecisionTreeRegressor(criterion="mse", max_features=1,
                                         max_depth=4)
        clf.fit(Xtrain, gx)

        fx0 = clf.predict(Xtrain)
        print("第", i, '轮 结果', fx0[:10])
        fx.append(fx0)

    # Ensemble prediction: baseline plus every round's tree output
    # (vectorized; replaces the original element-wise accumulation loop).
    gx = np.sum(fx, axis=0)
    print(gx[:10])

    # Training sum of squared errors.  The original built this with a Python
    # loop that shadowed the builtin `sum`.
    sse = np.sum((gx - Ytrain) ** 2)
    print(sse)
|
|
52
|
+
|
|
53
|
if __name__ == '__main__':
    # Train the hand-rolled boosting model on the Boston training split;
    # the held-out test portion is loaded but unused in this script.
    splits = read_data()
    Xtrain, Ytrain = splits[0], splits[2]
    fit(Xtrain, Ytrain)
|