Machine learning: the XGBRegressor class from XGBoost's sklearn module (regression)

demo


from xgboost.sklearn import XGBRegressor
from xgboost import plot_importance
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_boston
from sklearn.metrics import mean_squared_error
 
# Load the dataset
# (note: load_boston was removed in scikit-learn 1.2; this demo assumes an older version)
boston = load_boston()
X, y = boston.data, boston.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# Initialize the model
model = XGBRegressor(max_depth=3,
                     learning_rate=0.1,
                     n_estimators=100,
                     objective='reg:linear',  # this default differs from XGBClassifier; renamed to 'reg:squarederror' in newer XGBoost
                     booster='gbtree',
                     gamma=0,
                     min_child_weight=1,
                     subsample=1,
                     colsample_bytree=1,
                     reg_alpha=0,
                     reg_lambda=1,
                     random_state=0)

# Train the model, evaluating on the eval_set as training proceeds
# (in newer XGBoost versions eval_metric and early_stopping_rounds are constructor arguments instead of fit() arguments)
model.fit(X_train, y_train,
          eval_set=[(X_train, y_train), (X_test, y_test)],
          eval_metric='rmse',
          verbose=50,
          early_stopping_rounds=50)
 
# Predict on the test set
ans = model.predict(X_test)
mse = mean_squared_error(y_test, ans)  # ground-truth values first, then predictions
print('mse:', mse)
 
# Plot feature importances
plot_importance(model)
plt.show()
Output

       [0]  validation_0-rmse:21.687   validation_1-rmse:21.3558
       [50] validation_0-rmse:1.8122   validation_1-rmse:4.8143
       [99] validation_0-rmse:1.3396   validation_1-rmse:4.63377

       mse: 21.471843729261288

(Figure: feature importance bar chart produced by plot_importance)
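If a numeric view of the chart is preferred, the same scores can be read from the fitted model. The short sketch below is an assumption-laden addition: it assumes the demo above has already been run, and reuses its model and boston objects.

import numpy as np

# Per-feature importance scores from the fitted model (this sketch assumes the demo above was run)
importances = model.feature_importances_
# Print features from most to least important
for idx in np.argsort(importances)[::-1]:
    print(boston.feature_names[idx], importances[idx])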

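For reference, here is a minimal sketch of the same workflow adapted to current library versions. It assumes scikit-learn >= 1.2 (where load_boston has been removed, so the California housing dataset stands in for Boston housing) and a recent XGBoost (where eval_metric and early_stopping_rounds are passed to the constructor rather than to fit); treat it as a sketch under those assumptions, not the original author's code.

from xgboost import XGBRegressor, plot_importance
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.datasets import fetch_california_housing
from sklearn.metrics import mean_squared_error

# Load the dataset (fetch_california_housing stands in for the removed load_boston)
X, y = fetch_california_housing(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# Initialize the model; eval_metric / early_stopping_rounds are constructor arguments here
model = XGBRegressor(max_depth=3,
                     learning_rate=0.1,
                     n_estimators=100,
                     objective='reg:squarederror',  # renamed from 'reg:linear'
                     eval_metric='rmse',
                     early_stopping_rounds=50,
                     random_state=0)

# Train with an evaluation set so early stopping can monitor the test RMSE
model.fit(X_train, y_train,
          eval_set=[(X_train, y_train), (X_test, y_test)],
          verbose=50)

# Predict and report MSE on the test set
ans = model.predict(X_test)
print('mse:', mean_squared_error(y_test, ans))

# Plot feature importances
plot_importance(model)
plt.show()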

Reposted from juejin.im/post/7126425974408740872