gbdt_train.py

# -*- coding: utf-8 -*-
import numpy as np
from sklearn.ensemble import GradientBoostingClassifier, GradientBoostingRegressor
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_wine, load_boston
wine = load_wine()
Xtrain, Xtest, Ytrain, Ytest = train_test_split(wine.data, wine.target, test_size=0.3)
print(Ytrain)
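# Note: without a random_state argument, train_test_split produces a different split
# on every run, so the scores printed below will vary slightly from run to run.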
# Default parameters:
# Accuracy: 0.9856
# AUC Score (Train): 0.862264
gbm1 = GradientBoostingClassifier(n_estimators=500, max_depth=10, max_features='sqrt', random_state=10)
gbm1.fit(Xtrain, Ytrain)
print("gbdt_clf", gbm1.score(Xtest, Ytest))
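# A minimal sketch (not part of the original script): inspect which wine features
# the fitted classifier relies on via its feature_importances_ attribute.
for name, importance in zip(wine.feature_names, gbm1.feature_importances_):
    print(f"{name}: {importance:.4f}")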
boston = load_boston()
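# Note: load_boston was deprecated in scikit-learn 1.0 and removed in 1.2;
# this script assumes an older scikit-learn version that still provides it.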
Xtrain, Xtest, Ytrain, Ytest = train_test_split(boston.data, boston.target, test_size=0.3)
print(Ytrain)
# Compare regressors with increasing n_estimators; score() reports R^2 on the test set.
gbm2 = GradientBoostingRegressor(n_estimators=5, max_depth=5, max_features='sqrt', random_state=10)
gbm2.fit(Xtrain, Ytrain)  # the higher the score, the better
print("gbdt1", gbm2.score(Xtest, Ytest))
gbm2 = GradientBoostingRegressor(n_estimators=50, max_depth=5, max_features='sqrt', random_state=10)
gbm2.fit(Xtrain, Ytrain)
print("gbdt2", gbm2.score(Xtest, Ytest))
gbdt3 = GradientBoostingRegressor(n_estimators=150, max_depth=5, max_features='sqrt', random_state=10)
gbdt3.fit(Xtrain, Ytrain)
print("gbdt3", gbdt3.score(Xtest, Ytest))
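# A minimal sketch (not part of the original script): trace how test-set R^2 evolves
# as trees are added, using staged_predict on the last fitted model (gbdt3, 150 trees).
from sklearn.metrics import r2_score

for i, Ypred in enumerate(gbdt3.staged_predict(Xtest), start=1):
    if i % 25 == 0:
        print(f"trees={i:3d}  R^2={r2_score(Ytest, Ypred):.4f}")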