Simple Algorithms: Polynomial Regression

```

import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

# Load the concrete compressive strength dataset; the last column is the target.
df_train = pd.read_excel('data/linear_regression/Concrete_Data.xls')
data = np.array(df_train)

# Shuffle the rows before splitting, then hold out the last 100 samples for testing.
np.random.seed(0)
num_row = np.size(data, axis=0)  # 1030 samples
index = np.random.permutation(num_row)
data = data[index]

split_point = 100
x_train = data[:-split_point, :-1]
y_train = data[:-split_point, -1]
x_test = data[-split_point:, :-1]
y_test = data[-split_point:, -1]

# Prepend a bias column of ones. Note that LinearRegression fits an intercept
# by default, so this column is technically redundant.
ones = np.ones((np.size(x_train, axis=0), 1))
x_train = np.hstack((ones, x_train))

model_a = LinearRegression()
model_a.fit(x_train, y_train)

ones = np.ones((np.size(x_test, axis=0), 1))
x_test = np.hstack((ones, x_test))
y_pre = model_a.predict(x_test)
print('R^2 score (linear regression):', model_a.score(x_test, y_test))

# Expand the features with all degree-2 terms (squares and pairwise products),
# then fit an ordinary linear model on the expanded features.
quadratic_featurizer = PolynomialFeatures(degree=2)
x_train_quadratic = quadratic_featurizer.fit_transform(x_train)
x_test_quadratic = quadratic_featurizer.transform(x_test)

model_b = LinearRegression()
model_b.fit(x_train_quadratic, y_train)

y_quadratic_pre = model_b.predict(x_test_quadratic)
print('R^2 score (polynomial regression):', model_b.score(x_test_quadratic, y_test))

```
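To make the feature expansion concrete, here is a minimal sketch (the feature values are made up for illustration) of what `PolynomialFeatures(degree=2)` produces for a two-feature input: a bias term, the original features, their squares, and the cross term. Fitting `LinearRegression` on these expanded features is what turns the linear model into a polynomial one.

```
import numpy as np
from sklearn.preprocessing import PolynomialFeatures

# Two samples with two hypothetical features each.
X = np.array([[2.0, 3.0],
              [1.0, 5.0]])

# degree=2 expands [a, b] into [1, a, b, a^2, a*b, b^2].
quadratic = PolynomialFeatures(degree=2)
print(quadratic.fit_transform(X))
# [[ 1.  2.  3.  4.  6.  9.]
#  [ 1.  1.  5.  1.  5. 25.]]
```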


Reposted from blog.csdn.net/mbshqqb/article/details/84391252