24/8/8算法笔记 决策回归树

发布于:2024-08-09 ⋅ 阅读:(126) ⋅ 点赞:(0)
from sklearn.tree import DecisionTreeRegressor
from sklearn import tree
import numpy as np
import matplotlib.pyplot as plt
创建数据
# Training inputs: 40 evenly spaced angles on [0, 2π], reshaped to a
# 2-D column so sklearn treats each row as one sample.
# Layout: [[sample1], [sample2], ...] — one row per sample, one feature each.
X_train = np.linspace(0, 2 * np.pi, 40).reshape(-1, 1)

# Targets: map every angle θ to the point (sin θ, cos θ) on the unit
# circle, stacked column-wise into an (n, 2) multi-output target.
y_train = np.hstack([np.sin(X_train), np.cos(X_train)])

plt.figure(figsize=(4, 4))
plt.scatter(y_train[:, 0], y_train[:, 1])

# Denser grid over the same interval, used below for prediction.
X_test = np.linspace(0, 2 * np.pi, 256).reshape(-1, 1)
首先使用线性回归
# Baseline: ordinary least squares on the angle → (sin, cos) mapping.
from sklearn.linear_model import LinearRegression

model = LinearRegression().fit(X_train, y_train)  # fit returns the estimator itself

y_pred = model.predict(X_test)  # predict on the dense test grid

# A good fit would trace the unit circle; a linear map of a scalar input
# cannot bend, so the scatter degenerates instead.
plt.scatter(y_pred[:, 0], y_pred[:, 1])

使用 SVR 支持向量机
# Bug fix: SVR does not support multi-output regression — calling
# SVR().fit(X_train, y_train) with the (n, 2) y_train raises
# "ValueError: y should be a 1d array", so the predict/plot lines below
# were unreachable.  Wrapping it in MultiOutputRegressor fits one
# independent SVR per target column, keeping the rest of the cell working.
from sklearn.svm import SVR
from sklearn.multioutput import MultiOutputRegressor

model = MultiOutputRegressor(SVR(kernel='poly'))
model.fit(X_train, y_train)  # fit: one polynomial-kernel SVR per output column

y_pred = model.predict(X_test)  # predict: shape (256, 2)
display(y_pred.shape)
# If the fit were good the predicted points would trace a circle.
plt.scatter(y_pred[:,0],y_pred[:,1])

直接使用 SVR 支持向量机会报错:SVR 不支持多输出回归,而这里的 y_train 是二维的 (n, 2)

决策树

# Regression tree capped at depth 3 — decision trees handle the
# multi-output (sin, cos) target natively, unlike SVR above.
model = DecisionTreeRegressor(max_depth=3).fit(X_train, y_train)

y_pred = model.predict(X_test)  # shape: (256, 2)
display(y_pred.shape)

# A good fit would trace the unit circle.
plt.scatter(y_pred[:, 0], y_pred[:, 1])
_ = plt.axis('equal')  # equal aspect ratio so the circle is not distorted

# Render the fitted tree with graphviz.
import graphviz

dot_data = tree.export_graphviz(model, filled=True, rounded=True)
graph = graphviz.Source(dot_data)
graph

# Same experiment with the depth cap raised to 4.
model = DecisionTreeRegressor(max_depth=4).fit(X_train, y_train)

y_pred = model.predict(X_test)  # shape: (256, 2)
display(y_pred.shape)

# A good fit would trace the unit circle.
plt.scatter(y_pred[:, 0], y_pred[:, 1])
_ = plt.axis('equal')  # equal aspect ratio so the circle is not distorted

# There are still 256 predicted points, but a depth-4 tree has at most
# 16 leaves, so they collapse into 16 clusters of 16 points each.

不进行剪枝,默认展开
# No depth cap: the tree grows until every leaf is pure.
model = DecisionTreeRegressor(max_depth=None)
model.fit(X_train, y_train)

y_pred = model.predict(X_test)  # shape: (256, 2)
display(y_pred.shape)

# A good fit would trace the unit circle.
plt.scatter(y_pred[:, 0], y_pred[:, 1])
_ = plt.axis('equal')  # equal aspect ratio so the circle is not distorted

model.get_depth()  # actual depth the unpruned tree reached

# Render the (much larger) fully grown tree.
dot_data = tree.export_graphviz(model, filled=True, rounded=True)
graph = graphviz.Source(dot_data)
graph

# A fully grown tree memorizes the training set, so the training
# score is a perfect R² = 1.0 — the data has been split "pure".
model.score(X_train, y_train)