Gradient descent worked examples in Jupyter (Python)

1. y = f(x) = x²

import numpy as np
import pandas as pd
from pandas import Series,DataFrame
import matplotlib.pyplot as plt
 
%matplotlib inline
## Objective function: f(x) = x^2.
def f(x):
    return x ** 2

## Derivative: f'(x) = 2x.
def h(x):
    return 2 * x

# History of the iterates, used later for plotting.
X = []
Y = []

x = 2                    # starting point
step = 0.8               # learning rate
f_current = f(x)
f_change = f_current     # sentinel > tolerance so the loop runs at least once
X.append(x)
Y.append(f_current)
# Descend until the objective value stops changing (|Δf| <= 1e-10).
while f_change > 1e-10:
    x -= step * h(x)
    new_value = f(x)
    f_change = np.abs(f_current - new_value)
    f_current = new_value
    X.append(x)
    Y.append(f_current)
print(u"最终结果为:", (x, f_current))

(Figure: console output of the run above.)

# Visualise the 1-D objective together with the path gradient descent took.
fig = plt.figure()
curve_x = np.arange(-2.1, 2.15, 0.05)
curve_y = np.square(curve_x)

# Grey line: y = x^2; blue dashed circles: the recorded iterates.
plt.plot(curve_x, curve_y, '-', color='#666666', linewidth=2)
plt.plot(X, Y, 'bo--')
plt.title(u'$y=x^2$函数求解最小值,最终解为:x=%.2f,y=%.2f' % (x, f_current))
plt.show()

The results are shown:

(Figure: plot of y = x² with the descent path marked on it.)

2. z = f(x, y) = x² + y²

## Objective function: f(x, y) = x^2 + y^2.
def f(x, y):
    return x ** 2 + y ** 2

## Partial derivative along either axis: d/dt (t^2) = 2t.
## The gradient of f is (2x, 2y), so one helper serves both coordinates.
def h(t):
    return 2 * t

# History of the iterates, used later by the 3-D plot.
X = []
Y = []
Z = []

x = 2                       # starting point (x-coordinate)
y = 2                       # starting point (y-coordinate)
step = 0.1                  # learning rate
f_current = f(x, y)
f_change = f_current        # sentinel > tolerance so the loop runs at least once
X.append(x)
Y.append(y)
Z.append(f_current)
# Descend until the objective value stops changing (|Δf| <= 1e-10).
while f_change > 1e-10:
    x = x - step * h(x)
    y = y - step * h(y)
    tmp = f(x, y)           # evaluate once per iteration (was computed twice)
    # BUG FIX: take the absolute change, matching the 1-D example above;
    # without abs(), any single increase of f would (wrongly) stop the loop.
    f_change = abs(f_current - tmp)
    f_current = tmp
    X.append(x)
    Y.append(y)
    Z.append(f_current)
print(u"最终结果为:", (x, y))

(Figure: console output of the run above.)

from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 — registers the '3d' projection

# Surface of z = x^2 + y^2 with the gradient-descent path overlaid.
fig = plt.figure()
# FIX: `Axes3D(fig)` stopped auto-attaching the axes to the figure in
# matplotlib 3.4 and was removed in 3.7 — the plot came out empty/broken.
# add_subplot(projection='3d') is the supported way to get 3-D axes.
ax = fig.add_subplot(111, projection='3d')
X2 = np.arange(-2, 2, 0.2)
Y2 = np.arange(-2, 2, 0.2)
X2, Y2 = np.meshgrid(X2, Y2)
Z2 = X2 ** 2 + Y2 ** 2

ax.plot_surface(X2, Y2, Z2, rstride=1, cstride=1, cmap='rainbow')
ax.plot(X, Y, Z, 'ro--')    # red dashed markers: the recorded iterates

ax.set_title(u'梯度下降法求解,最终结果为: x=%.2f,y=%.2f,z=%.2f' % (x, y, f_current))

plt.show()

The results are shown:
(Figure: 3-D surface of z = x² + y² with the descent path in red.)

Source: blog.csdn.net/qq_42585108/article/details/105089674