# my_gbdt_classic.py — hand-rolled gradient-boosted decision stumps (base-2
# logistic loss) compared against sklearn's GradientBoostingClassifier on the
# breast-cancer dataset.
  1. #-*- coding:utf-8 -*-
  2. import numpy as np
  3. from sklearn.ensemble import GradientBoostingClassifier,GradientBoostingRegressor
  4. from sklearn.model_selection import train_test_split
  5. from sklearn.datasets import load_wine,load_boston,load_breast_cancer
  6. from sklearn import tree
  7. def read_data():
  8. boston = load_breast_cancer()
  9. Xtrain, Xtest, Ytrain, Ytest = train_test_split(boston.data, boston.target, test_size=0.3)
  10. for i in range(len(Ytrain)):
  11. if Ytrain[i] == 0:
  12. Ytrain[i] = -1
  13. for i in range(len(Ytest)):
  14. if Ytest[i] == 0:
  15. Ytest[i] = -1
  16. return Xtrain, Xtest, Ytrain, Ytest
  17. def init(Ytrain):
  18. positive = sum(Ytrain == 1)
  19. negative = Ytrain.shape[0] - positive
  20. p = np.log2(positive/negative) # 可能是为了训练稍微快点
  21. return np.ones(Ytrain.shape[0])*p
  22. def fit(Xtrain, Ytrain):
  23. print("init", Ytrain[:10])
  24. fx = []
  25. clf_tress = []
  26. fx0 = init(Ytrain)
  27. fx.append(fx0)
  28. print("0", fx0[:10])
  29. gx = fx0
  30. for i in range(10):
  31. # 求伪残差
  32. hx_0 = []
  33. for j in range(Ytrain.shape[0]):
  34. p = Ytrain[j] / (np.exp2(Ytrain[j]*gx[j]) + 1)
  35. hx_0.append(p)
  36. print("第", i, '轮 残差', gx[:10])
  37. clf = tree.DecisionTreeRegressor(criterion="mse", max_features=1, max_depth=1)
  38. clf.fit(Xtrain, np.array(hx_0))
  39. clf_tress.append(clf)
  40. fx_i = clf.predict(Xtrain)*0.7
  41. print("第", i, '轮 结果', fx_i[:10])
  42. fx.append(fx_i)
  43. gx = gx + fx_i
  44. gx = np.zeros(Ytrain.shape[0])
  45. for i in range(len(fx)):
  46. gx = gx + fx[i]
  47. print(gx[:10])
  48. gx = np.sign(gx)
  49. p = sum(gx==Ytrain)/Ytrain.shape[0]
  50. print("准确率", p)
  51. return clf_tress, fx0[0]
  52. def score(Xtest, Ytest, trees, fx0):
  53. gx = np.ones(Ytest.shape[0])*fx0
  54. for i in range(len(trees)):
  55. gx = gx + trees[i].predict(Xtest)
  56. gx = np.sign(gx)
  57. p = sum(gx == Ytest) / Ytest.shape[0]
  58. print("准确率", p)
  59. gx = np.sign(trees[0].predict(Xtest))
  60. p = sum(gx == Ytest) / Ytest.shape[0]
  61. print("准确率0", p)
if __name__ == '__main__':
    # Train and evaluate the hand-rolled GBDT on the breast-cancer split.
    Xtrain, Xtest, Ytrain, Ytest = read_data()
    trees,fx0 = fit(Xtrain, Ytrain)
    score(Xtest, Ytest, trees, fx0)
    # Baseline: sklearn's own GBDT with comparable hyper-parameters
    # (10 stumps, learning rate 0.7) for an accuracy comparison.
    gbm1 = GradientBoostingClassifier(n_estimators=10, max_depth=1, learning_rate=0.7,
                                      max_features='sqrt', random_state=10)
    gbm1.fit(Xtrain, Ytrain)
    print("gbdt", gbm1.score(Xtest, Ytest))