Coursera Machine Learning Programming Assignments in Python (Andrew Ng): 2.2 Regularized Logistic Regression

2.2 Regularized logistic regression

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

Load the data

data2 = pd.read_csv('ex2data2.txt', header=None, names=['Microchip Test 1', 'Microchip Test 2', 'quality assurance'])
_, ax = plt.subplots(figsize=(10,6))
ax.scatter(x=data2[data2['quality assurance']==1]['Microchip Test 1'], y=data2[data2['quality assurance']==1]['Microchip Test 2'], c='red', marker='o', label='y=1')
ax.scatter(x=data2[data2['quality assurance']==0]['Microchip Test 1'], y=data2[data2['quality assurance']==0]['Microchip Test 2'], c='blue', marker='x', label='y=0')
ax.set_xlabel('Microchip Test 1')
ax.set_ylabel('Microchip Test 2')
ax.legend()
plt.show()

Define a function that constructs polynomial features. You can write it yourself or use sklearn.preprocessing.PolynomialFeatures; a hand-written equivalent is sketched after the code below.

from sklearn.preprocessing import PolynomialFeatures
def feature_map(data, power=1):
    # map the two raw features to all polynomial terms up to the given power
    # (includes the bias column of ones)
    poly = PolynomialFeatures(power)
    return poly.fit_transform(data)
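As mentioned above, you can also write the mapping by hand. A minimal sketch (feature_map_manual is my name, not from the original post) that produces the same columns as PolynomialFeatures:

def feature_map_manual(data, power=1):
    # data: (m, 2) array; returns every term x1^(d-j) * x2^j with d <= power,
    # in the same order as PolynomialFeatures (bias column of ones first)
    x1, x2 = data[:, 0], data[:, 1]
    cols = [x1 ** (d - j) * x2 ** j for d in range(power + 1) for j in range(d + 1)]
    return np.stack(cols, axis=1)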

Initialize the data

X = feature_map(data2.values[:,:-1], 6)
y = data2.values[:,-1]
theta = np.zeros(X.shape[1])
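With power=6, the two raw features expand to 28 columns (every term x1^i * x2^j with i + j ≤ 6, including the bias), so X has shape (118, 28) for this dataset.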

Define the sigmoid function
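For reference, the sigmoid (logistic) function is

g(z) = \frac{1}{1 + e^{-z}}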

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

Define the cost function
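The implementation below is the standard regularized cost from the assignment; note that the bias term \theta_0 is not penalized:

J(\theta) = -\frac{1}{m}\sum_{i=1}^{m}\left[y^{(i)}\log h_\theta(x^{(i)}) + \left(1-y^{(i)}\right)\log\left(1-h_\theta(x^{(i)})\right)\right] + \frac{\lambda}{2m}\sum_{j=1}^{n}\theta_j^2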

def costFunction(theta, X, y, lamda=0):
    h = sigmoid(np.dot(X, theta))
    reg = lamda / (2 * X.shape[0]) * np.sum(theta[1:] ** 2)
    return np.mean(-y * np.log(h) - (1 - y) * np.log(1 - h)) + reg
costFunction(theta, X, y)
0.6931471805599454
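With theta initialized to zeros, every prediction is sigmoid(0) = 0.5, so the cost is -ln(0.5) = ln 2 ≈ 0.6931, which matches the output above.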

Define the gradient function (it returns the regularized gradient for the optimizer below, rather than running gradient descent itself)
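Its gradient, again leaving \theta_0 unregularized, is

\frac{\partial J(\theta)}{\partial \theta_j} = \frac{1}{m}\sum_{i=1}^{m}\left(h_\theta(x^{(i)}) - y^{(i)}\right)x_j^{(i)} + \frac{\lambda}{m}\theta_j \qquad (j \geq 1;\ \text{drop the regularization term for } j = 0)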

def gradient(theta, X, y, lamda=0):
    # regularization term for the gradient; theta[0] (the bias) is not regularized
    temp = np.zeros(X.shape[1])
    temp[1:] = lamda / X.shape[0] * theta[1:]
    return (1 / X.shape[0]) * np.dot(X.T, sigmoid(np.dot(X, theta)) - y) + temp

Solve with an optimizer. method='Newton-CG' uses the analytic gradient supplied via jac; other gradient-based methods such as 'TNC' also work here.

import scipy.optimize as opt
def opt_minimize(theta, X, y, lamda=0):
    return opt.minimize(fun=costFunction, x0=theta, args=(X, y, lamda), jac=gradient, method='Newton-CG')
res = opt_minimize(theta, X, y, 0.1)
res
     fun: 0.39459413886837336
     jac: array([ 1.59606926e-07,  9.55862480e-08,  7.80291421e-08,  7.82880861e-08,
       -5.42155912e-08, -1.95167654e-08,  2.51495090e-08,  2.23552154e-08,
        2.27812757e-08, -1.34140482e-08,  2.62075651e-08, -1.81413433e-08,
        2.50288801e-08,  1.38495687e-09,  4.59880295e-08,  3.81082340e-08,
        6.17029884e-09, -3.11331424e-10,  7.81442803e-09,  1.19263211e-08,
       -4.90765663e-08,  4.46078576e-09, -7.98232782e-09,  1.96910205e-09,
       -1.24834627e-09,  8.92749554e-09, -1.50935178e-08, -7.46805079e-08])
 message: 'Optimization terminated successfully.'
    nfev: 13
    nhev: 0
     nit: 11
    njev: 353
  status: 0
 success: True
       x: array([ 2.75388901,  1.80721804,  2.95665911, -4.21450687, -3.37951426,
       -4.22537305,  0.74551614, -1.07820274, -0.47233842, -0.49912867,
       -3.26576077,  0.5279155 , -1.76304768, -1.20787477, -2.77788005,
       -0.62158143, -0.47032465,  0.62374778, -1.137011  , -1.21215262,
       -0.09179464, -2.63127784,  0.44537292, -0.7362243 ,  0.42674371,
       -1.14910402, -0.95794963, -1.14893012])
theta_result = res.x
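As a quick sanity check (not in the original post), you can compute the training accuracy of the fitted theta; predict is a hypothetical helper name:

def predict(theta, X):
    # classify as 1 when the predicted probability is at least 0.5
    return (sigmoid(X @ theta) >= 0.5).astype(int)

accuracy = np.mean(predict(theta_result, X) == y)
print(f'Training accuracy: {accuracy:.1%}')  # for comparison, the assignment quotes about 83.1% at lambda=1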

Define the decision-boundary plotting function

def plot_bound(theta, ax):
    # brute force: map a dense grid of points through the same degree-6
    # feature map and keep those where theta^T x is (numerically) zero
    x_min = data2.iloc[:,0].min()
    x_max = data2.iloc[:,0].max()
    y_min = data2.iloc[:,1].min()
    y_max = data2.iloc[:,1].max()
    temp = np.array([(i,j) for i in np.linspace(x_min, x_max, 2000) for j in np.linspace(y_min, y_max, 2000)])
    data = feature_map(temp, 6)
    temp = data[np.abs(data @ theta) < 0.0003]
    x = temp[:,1]  # column 0 is the bias; columns 1 and 2 are the raw features
    y = temp[:,2]
    ax.scatter(x, y, label='decision boundary')
_, ax = plt.subplots(figsize=(10,6))
ax.scatter(x=data2[data2['quality assurance']==1]['Microchip Test 1'], y=data2[data2['quality assurance']==1]['Microchip Test 2'], c='red', marker='o', label='y=1')
ax.scatter(x=data2[data2['quality assurance']==0]['Microchip Test 1'], y=data2[data2['quality assurance']==0]['Microchip Test 2'], c='blue', marker='x', label='y=0')
plot_bound(theta_result, ax)
ax.set_xlabel('Microchip Test 1')
ax.set_ylabel('Microchip Test 2')
ax.legend()
plt.show()
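A lighter-weight alternative (a sketch, not from the original post; plot_bound_contour is my name) is to evaluate theta^T x on a coarser grid and let matplotlib trace the zero level set:

def plot_bound_contour(theta, ax, power=6, n=200):
    # evaluate theta^T x over an n-by-n grid and draw the zero contour
    u = np.linspace(data2.iloc[:,0].min(), data2.iloc[:,0].max(), n)
    v = np.linspace(data2.iloc[:,1].min(), data2.iloc[:,1].max(), n)
    U, V = np.meshgrid(u, v)
    Z = (feature_map(np.column_stack([U.ravel(), V.ravel()]), power) @ theta).reshape(U.shape)
    ax.contour(U, V, Z, levels=[0])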

Plot the decision boundary with lamda=0

theta_0 = opt_minimize(theta, X, y, 0).x
_, ax = plt.subplots(figsize=(10,6))
ax.scatter(x=data2[data2['quality assurance']==1]['Microchip Test 1'], y=data2[data2['quality assurance']==1]['Microchip Test 2'], c='red', marker='o', label='y=1')
ax.scatter(x=data2[data2['quality assurance']==0]['Microchip Test 1'], y=data2[data2['quality assurance']==0]['Microchip Test 2'], c='blue', marker='x', label='y=0')
plot_bound(theta_0, ax)
ax.set_xlabel('Microchip Test 1')
ax.set_ylabel('Microchip Test 2')
ax.legend()
plt.show()

Plot the decision boundary with lamda=10

theta_10 = opt_minimize(theta, X, y, 10).x
_, ax = plt.subplots(figsize=(10,6))
ax.scatter(x=data2[data2['quality assurance']==1]['Microchip Test 1'], y=data2[data2['quality assurance']==1]['Microchip Test 2'], c='red', marker='o', label='y=1')
ax.scatter(x=data2[data2['quality assurance']==0]['Microchip Test 1'], y=data2[data2['quality assurance']==0]['Microchip Test 2'], c='blue', marker='x', label='y=0')
plot_bound(theta_10, ax)
ax.set_xlabel('Microchip Test 1')
ax.set_ylabel('Microchip Test 2')
ax.legend()
plt.show()
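Comparing the three plots: λ = 0.1 traces the data reasonably well; with λ = 0 the boundary becomes very convoluted and overfits the training set, while the heavier λ = 10 over-smooths it and underfits, which is exactly the overfitting/underfitting trade-off the assignment illustrates.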

Reposted from www.cnblogs.com/shouzhenghouchuqi/p/10586132.html