# Machine learning with sklearn (3): Polynomial regression

import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error   #均方误差

sample_num = 32

# np.linspace returns evenly spaced numbers over the closed interval [start, stop].
# reshape(-1, 1) turns the flat vector into a single column (one sample per row),
# which is the 2-D (n_samples, n_features) shape scikit-learn estimators expect.
data_x = np.linspace(start=0, stop=sample_num/4, num=sample_num).reshape(-1,1)
# Standard-normal noise, one value per sample.
# NOTE(review): no np.random.seed is set, so every run produces different noise.
randn_n = np.random.randn(sample_num).reshape(-1,1)

# Linear target: h(x) = 0.3 - 0.05*x
n0, n1 = 0.3, -0.05
curve_linear = n0 + n1*data_x     # fixed: dropped the unused 'yr' alias from the chained assignment
curve_linear += randn_n * 0.03    # add a small amount of noise

# Cubic target: h(x) = 0.1 - 0.02*x + 0.03*x^2 - 0.04*x^3
n0, n1, n2, n3 = 0.1, -0.02, 0.03, -0.04
curve_polynomial = n0 + n1*data_x + n2*(data_x**2) + n3*(data_x**3)
curve_polynomial += randn_n       # add full-scale noise

# Fixed: '%matplotlib inline' is an IPython/Jupyter magic, not Python syntax —
# it raises SyntaxError in a plain .py script, so it was removed. plt.show()
# at the end renders the figure instead (matching the other plots in this file).

# Top panel: the noisy linear samples.
plt.subplot(2,1,1)
plt.plot(data_x, curve_linear, 'b.')
plt.xlabel("np.linspace(0,8,32)")
plt.ylabel("curve_linear(data_x)")

# Bottom panel: the noisy cubic samples.
plt.subplot(2,1,2)
plt.plot(data_x, curve_polynomial, 'b.')
plt.xlabel("np.linspace(0,8,32)")
plt.ylabel("curve_polynomial(data_x)")
plt.show()

# Output: Text(0, 0.5, 'curve_polynomial(data_x)')

# Expand the inputs to polynomial features up to degree 3, then fit an
# ordinary least-squares model to the noisy linear data.
poly_features_1 = PolynomialFeatures(degree=3)
linear_reg = LinearRegression()
design_matrix = poly_features_1.fit_transform(data_x)
linear_reg.fit(design_matrix, curve_linear)
print(poly_features_1.get_params())

# Output: {'degree': 3, 'include_bias': True, 'interaction_only': False}

# Evaluate the fitted model on a dense grid over the same interval so the
# predicted curve plots smoothly.
fit_x = np.linspace(0, sample_num / 4, num=1024).reshape(-1, 1)
fit_linear = linear_reg.predict(poly_features_1.fit_transform(fit_x))

# Red line: the polynomial fit; blue dots: the noisy training samples.
plt.plot(fit_x, fit_linear, 'r-')
plt.plot(data_x, curve_linear, 'b.')
plt.xlabel("np.linspace(0,8,32)")
plt.ylabel("curve fitting using Polynomial")
plt.show()

# Same pipeline for the cubic data: degree-3 feature expansion followed by
# ordinary least squares.
poly_features_3 = PolynomialFeatures(degree=3)
linear_reg_best = LinearRegression()
design_matrix_3 = poly_features_3.fit_transform(data_x)
linear_reg_best.fit(design_matrix_3, curve_polynomial)

# Predict over a dense grid and overlay the fit on the noisy samples.
fit_x = np.linspace(0, sample_num / 4, num=1024).reshape(-1, 1)
fit_linear_best = linear_reg_best.predict(poly_features_3.fit_transform(fit_x))

plt.plot(fit_x, fit_linear_best, 'r-')
plt.plot(data_x, curve_polynomial, 'b.')
plt.xlabel("np.linspace(0,8,32)")
plt.ylabel("curve fitting using Polynomial")
plt.show()

# Score both models on the training inputs via mean squared error.
train_feats_1 = poly_features_1.fit_transform(data_x)
train_feats_3 = poly_features_3.fit_transform(data_x)
hypo = linear_reg.predict(train_feats_1)
hypo_best = linear_reg_best.predict(train_feats_3)
print("curve_linear mean_squared_error: ", mean_squared_error(curve_linear,hypo))
print("curve_polynomial mean_squared_error: ", mean_squared_error(curve_polynomial,hypo_best))

# Output:
# curve_linear mean_squared_error:  0.0006331615000094218
# curve_polynomial mean_squared_error:  0.7035127777882473

print("linear_reg intercept: ",linear_reg.intercept_)   # intercept of the fitted equation

# Output: linear_reg intercept:  [0.29540646]

print("linear_reg Coefficients: ", linear_reg.coef_)   # coefficients of the fitted equation

# Output: linear_reg Coefficients:  [[ 0.         -0.04252453 -0.00040055 -0.00010925]]

print("linear_reg_best intercept: ",linear_reg_best.intercept_)   # intercept of the fitted equation

# Output: linear_reg_best intercept:  [-0.05311792]

print("linear_reg_best Coefficients: ",linear_reg_best.coef_)     # coefficients of the fitted equation

# Output: linear_reg_best Coefficients:  [[ 0.          0.22918247  0.01664819 -0.04364167]]

# You may also like
#
# Reprinted from blog.csdn.net/weixin_44530236/article/details/88580212