Machine Learning with sklearn (4): Logistic Regression

import numpy as np
import random

# Gradient descent function
def gradientDescent(x, y, theta, alpha, m, numIterations):   # alpha: learning rate, m: number of examples, numIterations: number of updates
    xTrans = x.transpose()   # transpose of the design matrix
    for i in range(0, numIterations):
        hypothesis = np.dot(x, theta)        # predictions for all examples
        loss = hypothesis - y                # residuals
        cost = np.sum(loss**2) / (2*m)       # squared-error cost
        print("Iteration %d / Cost: %f" % (i, cost))
        gradient = np.dot(xTrans, loss) / m  # gradient of the cost w.r.t. theta
        # update theta
        theta = theta - alpha * gradient
    return theta
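For reference, the loop above is standard batch gradient descent on the least-squares cost. In the usual notation (X is the design matrix, m the number of examples):

J(\theta) = \frac{1}{2m} \lVert X\theta - y \rVert^2, \qquad \nabla J(\theta) = \frac{1}{m} X^\top (X\theta - y), \qquad \theta \leftarrow \theta - \alpha \, \nabla J(\theta)

which is exactly what hypothesis, loss, cost, gradient, and the theta update compute.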

# Construct a synthetic dataset
def genData(numPoints, bias, variance):
    x = np.zeros(shape=(numPoints, 2))
    y = np.zeros(shape=numPoints)
    for i in range(0, numPoints):
        x[i][0] = 1    # bias (intercept) column
        x[i][1] = i    # single feature: the index itself
        # target variable
        y[i] = (i + bias) + random.uniform(0, 1) * variance
    return x, y
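To see what genData produces, here is a quick check (the argument values are illustrative; the names x_demo and y_demo are not from the original post):

x_demo, y_demo = genData(5, 25, 10)
print(x_demo)
# [[1. 0.]
#  [1. 1.]
#  [1. 2.]
#  [1. 3.]
#  [1. 4.]]
print(y_demo)   # roughly i + 25, plus uniform noise in [0, 10)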

x, y = genData(100, 25, 10)   # 100 points, bias 25, noise scale 10 (values consistent with the output below)
m, n = np.shape(x)
print(m, n)

numIterations = 50000
alpha = 0.0005
theta = np.ones(n)
theta = gradientDescent(x, y, theta, alpha, m, numIterations)
print(theta)
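As a reference point, the exact least-squares solution for this small problem can be computed directly; the gradient-descent trace below should converge toward it (a minimal sketch using NumPy's lstsq):

# Exact least-squares fit: theta_exact minimizes ||x @ theta - y||^2.
theta_exact, *_ = np.linalg.lstsq(x, y, rcond=None)
print(theta_exact)   # target values for the gradient-descent run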

Output of the gradient-descent run:

Iteration 0 / Cost: 408.935704
Iteration 1 / Cost: 230.001203
Iteration 2 / Cost: 156.207536
Iteration 3 / Cost: 125.765765
Iteration 4 / Cost: 113.198945
Iteration 5 / Cost: 108.002366
Iteration 6 / Cost: 105.844704
Iteration 7 / Cost: 104.940060
Iteration 8 / Cost: 104.552065
Iteration 9 / Cost: 104.377101
Iteration 10 / Cost: 104.289977
...
Iteration 49991 / Cost: 3.767171
Iteration 49992 / Cost: 3.767171
Iteration 49993 / Cost: 3.767171
Iteration 49994 / Cost: 3.767171
Iteration 49995 / Cost: 3.767171
Iteration 49996 / Cost: 3.767171
Iteration 49997 / Cost: 3.767171
Iteration 49998 / Cost: 3.767171
Iteration 49999 / Cost: 3.767171
[29.13339253  1.0064644 ]
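The learned parameters say y ≈ 29.13 + 1.006·i, which is consistent with how the data was built: with bias 25 and noise uniform(0, 1) * 10, the expected intercept is 25 + 10 · 0.5 = 30, and the true slope is 1. Since this series is about sklearn, one way to double-check the result is to refit the same data with sklearn's LinearRegression (a sketch; fit_intercept=False because x already carries a column of ones, and x_new is a hypothetical input):

from sklearn.linear_model import LinearRegression

# Cross-check the gradient-descent solution with sklearn's fit.
model = LinearRegression(fit_intercept=False)   # x already contains the bias column
model.fit(x, y)
print(model.coef_)            # should be close to the theta printed above

# Predict for a hypothetical new input with feature value 120.
x_new = np.array([1, 120])
print(np.dot(x_new, theta))   # roughly 29.13 + 1.006 * 120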

Reposted from blog.csdn.net/weixin_44530236/article/details/88623191