function [k, ender] = steepest(f, x, e)
% STEEPEST  Minimize a function of x1, x2 by steepest descent with exact
% line search on the symbolic step size.
%
% Inputs:
%   f - symbolic expression in the symbolic variables x1 and x2,
%       e.g. f = x1 - x2 + 2*x1^2 + 2*x1*x2 + x2^2
%   x - 2x1 starting point, e.g. x = [0; 0]
%   e - stopping tolerance on the gradient norm, e.g. e = 10^(-20)
%
% Outputs:
%   k     - number of iterations performed
%   ender - computed minimizer (2x1 double vector)
syms x1 x2 m;                        % m is the symbolic step size (learning rate)
% Descent direction: the negative gradient of f w.r.t. x1 and x2.
d = -[diff(f, x1); diff(f, x2)];
flag = 1;                            % loop control flag
k = 0;                               % iteration counter
while flag
    % Evaluate the descent direction at the current point.
    d_temp = subs(d, x1, x(1));      % substitute current x1
    d_temp = subs(d_temp, x2, x(2)); % then substitute current x2
    % Gradient norm at the current point; double() so the comparison
    % below is numeric rather than symbolic.
    nor = double(norm(d_temp));
    if nor >= e
        % Candidate next point as a function of the step size m.
        x_temp = x + m * d_temp;
        % Objective restricted to the search ray: g(m) = f(x + m*d).
        f_temp = subs(f, x1, x_temp(1));
        f_temp = subs(f_temp, x2, x_temp(2));
        % Exact line search: minimize g(m) by solving g'(m) = 0.
        h = diff(f_temp, m);
        m_temp = solve(h);           % NOTE(review): assumes a single stationary
                                     % point (true for quadratic f) — confirm
                                     % for non-quadratic objectives
        x = x + m_temp * d_temp;     % move to the next iterate
        k = k + 1;                   % count this iteration
    else
        flag = 0;                    % gradient small enough: converged
    end
end
ender = double(x);                   % minimizer satisfying the stopping rule
end
%% Usage example
% MATLAB does not allow script statements after a function definition in a
% function file, so run this from the command window or a separate script:
%
%   syms x1 x2;
%   f = (x1 - 2)^2 + 2*(x2 - 1)^2;   % f may be any smooth function of x1, x2
%   x = [1; 3];                      % starting point
%   e = 10^(-20);                    % gradient-norm tolerance
%   [k, ender] = steepest(f, x, e)   % expected minimizer: [2; 1]