Source: 天天看點 (scraped article)

The binary-classification principle of gradient boosted trees (GBDT)

import numpy as np
import matplotlib.pyplot as plt
from sklearn.ensemble import GradientBoostingClassifier
from sklearn import tree
           
Build a toy 1-D dataset and fit a gradient boosting classifier with 3 stumps.
# Toy 1-D dataset: features 1..10, labels repeating the pattern 0,0,0,1,1.
xi = np.arange(1, 11)
yi = np.tile([0, 0, 0, 1, 1], 2)
display(xi, yi)
# array([ 1,  2,  3,  4,  5,  6,  7,  8,  9, 10])
# array([0, 0, 0, 1, 1, 0, 0, 0, 1, 1])

# Three boosting rounds, each tree a depth-1 stump (default learning_rate=0.1).
gbdt = GradientBoostingClassifier(n_estimators=3, max_depth=1)
gbdt.fit(xi[:, np.newaxis], yi)
           

Construction of the first decision tree

# Visualize the first boosting stage (a single regression stump).
first_stump = gbdt[0, 0]
plt.figure(figsize=(9, 6))
_ = tree.plot_tree(first_stump, filled=True)
           
Compute the initial raw score F0 = log(p / (1 - p)) — the prior log-odds of
the positive class (4 positives out of 10 samples, hence log(4/6)).

# Initial raw score: prior log-odds of the positive class.
# 4 of 10 labels are 1, so the odds are 4:6.
F0 = np.log(4 / 6)  # -0.40546510810816444
           

Compute the residuals — the negative gradient of the log-loss,
y - sigmoid(F0).

# Residuals = negative gradient of the log-loss: y - sigmoid(F0).
p0 = 1 / (1 + np.exp(-F0))
yi_1 = yi - p0
# array([-0.4, -0.4, -0.4,  0.6,  0.6, -0.4, -0.4, -0.4,  0.6,  0.6])
           
# MSE of every candidate split point of the residuals.
# Split i puts the first i samples into the left child; i == n means
# "no split" (variance of the whole node).  Generalized: n is taken from
# the data instead of being hard-coded to 10.
n = len(yi_1)
mse1 = []

for i in range(1, n + 1):
    if i == n:
        mse1.append(np.var(yi_1))
    else:
        left, right = yi_1[:i], yi_1[i:]
        # Weighted average of the two children's variances.
        mse1.append((np.var(left) * i + np.var(right) * (n - i)) / n)

print(np.round(mse1, 4))
# [0.2222 0.2    0.1714 0.225  0.24   0.2333 0.2095 0.15   0.2    0.24  ]

mse1 = np.asarray(mse1)
mse1
# array([0.22222222, 0.2       , 0.17142857, 0.225     , 0.24      ,
#      0.23333333, 0.20952381, 0.15      , 0.2       , 0.24      ])
# Minimum is at i = 8, so the stump splits after the 8th sample.
           
Leaf values of the first tree: one Newton step,
gamma = sum(residual) / sum((y - r) * (1 - y + r)).
# Leaf value (single Newton step): sum(r) / sum((y - r) * (1 - y + r)).
# Left leaf: the first 8 samples.
left_y, left_r = yi[:8], yi_1[:8]
np.round(left_r.sum() / ((left_y - left_r) * (1 - left_y + left_r)).sum(), 3)
# -0.625

# Right leaf: the last 2 samples.
right_y, right_r = yi[8:], yi_1[8:]
np.round(right_r.sum() / ((right_y - right_r) * (1 - right_y + right_r)).sum(), 3)
# 2.5
           
# The fitted stump's raw outputs agree with the hand-computed leaf values.
gbdt[0, 0].predict(xi.reshape(-1, 1))
# array([-0.625, -0.625, -0.625, -0.625, -0.625, -0.625, -0.625, -0.625,
#       2.5  ,  2.5  ])

# Hand-coded copy of the first tree's per-sample predictions.
y_1 = np.asarray([-0.625] * 8 + [2.5] * 2)
y_1
# array([-0.625, -0.625, -0.625, -0.625, -0.625, -0.625, -0.625, -0.625,
#       2.5  ,  2.5  ])
           
Update the raw score with the shrunken tree output (learning_rate = 0.1).
# Boosting update: F1 = F0 + learning_rate * tree1 output, learning_rate = 0.1.
F1 = F0 + 0.1 * y_1
F1.round(4)
# array([-0.468 , -0.468 , -0.468 , -0.468 , -0.468 , -0.468 , -0.468 ,
#      -0.468 , -0.1555, -0.1555])

# New residuals = negative gradient at F1: y - sigmoid(F1).
p1 = 1 / (1 + np.exp(-F1))
yi_2 = yi - p1
yi_2.round(4)
# array([-0.3851, -0.3851, -0.3851,  0.6149,  0.6149, -0.3851, -0.3851,
#        -0.3851,  0.5388,  0.5388])
           

Fitting the second tree

# MSE of every candidate split point of the new residuals.
# Split i puts the first i samples into the left child; i == n means
# "no split".  Generalized: n is taken from the data instead of being
# hard-coded to 10.
n = len(yi_2)
mse2 = []

for i in range(1, n + 1):
    if i == n:
        mse2.append(np.var(yi_2))
    else:
        left, right = yi_2[:i], yi_2[i:]
        # Weighted average of the two children's variances.
        mse2.append((np.var(left) * i + np.var(right) * (n - i)) / n)

print(np.round(mse2, 4))
mse2 = np.asarray(mse2)
# [0.2062 0.1856 0.1592 0.2105 0.2224 0.2187 0.1998 0.15   0.1904 0.2227]
# Minimum again at i = 8: the second stump chooses the same split point.
           
# Visualize the second boosting stage.
second_stump = gbdt[1, 0]
plt.figure(figsize=(9, 6))
_ = tree.plot_tree(second_stump, filled=True)
           
Leaf values of the second tree, computed with the same Newton-step formula.
# Leaf value (single Newton step): sum(r) / sum((y - r) * (1 - y + r)).
# Left leaf: the first 8 samples.
left_y, left_r = yi[:8], yi_2[:8]
np.round(left_r.sum() / ((left_y - left_r) * (1 - left_y + left_r)).sum(), 3)
# -0.571

# Right leaf: the last 2 samples.
right_y, right_r = yi[8:], yi_2[8:]
np.round(right_r.sum() / ((right_y - right_r) * (1 - right_y + right_r)).sum(), 3)
# 2.168
           

(End of excerpt.)