function rosenbrock_newton
% Minimize the Rosenbrock function with Newton's method or steepest
% descent, using Armijo backtracking for the step length.

% Initial point
%x0 = [1.2; 1.2];
x0 = [-1.2; 1];
% Exact solution
xs = [1; 1];
% Maximum number of iterations
Nmax = 100000;
% Use Newton's method? 1 -> yes; 0 -> steepest descent
Newton = 1;

xk  = x0;
df0 = df(x0);

% Iteration loop
for i = 1:Nmax
    % Calculate gradient and Hessian
    dfk = df(xk);
    if Newton, df2k = df2(xk); else df2k = eye(length(xk)); end

    % Search direction: solve df2k*pk = -dfk (the Newton step; with the
    % identity in place of the Hessian this reduces to steepest descent)
    pk = -(df2k \ dfk);

    if norm(pk) <= 1.0E-12*norm(df0)
        % converged!
        break;
    end
    if dfk'*pk >= -1.0E-13*norm(pk)*norm(dfk)
        % too little descent - switch to steepest descent
        pk = -dfk;
    end

    % Calculate alpha by backtracking
    alphak = armijo(xk, pk);

    % Line search step
    xk = xk + alphak*pk;

    fprintf('%3d: x = [%13.6e,%13.6e], p = [%13.6e,%13.6e], alpha = %13.6e, f = %13.6e, |df| = %13.6e, |x-x*| = %13.6e\n', ...
            i, xk(1), xk(2), pk(1), pk(2), alphak, f(xk), norm(dfk), norm(xk-xs));
end
end

% Backtracking line search (Algorithm 3.1, p. 37 in Nocedal & Wright):
% shrink alpha by the factor rho until the Armijo sufficient-decrease
% condition f(x + alpha*p) <= f(x) + c1*alpha*df(x)'*p holds.
function alpha = armijo(x, p)
alpha = 1.0;
rho   = 0.5;
c1    = 1.0E-03;
fx  = f(x);
dfp = df(x)'*p;
while f(x + alpha*p) > fx + c1*alpha*dfp
    alpha = rho*alpha;
end
end

% Objective function (Rosenbrock)
function r = f(x)
r = 100*(x(2) - x(1)^2)^2 + (1 - x(1))^2;
end

% Gradient
function r = df(x)
r = [-400*(x(2) - x(1)^2)*x(1) - 2*(1 - x(1));
      200*(x(2) - x(1)^2)];
end

% Hessian
function r = df2(x)
r = [400*(3*x(1)^2 - x(2)) + 2, -400*x(1);
     -400*x(1),                  200];
end
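
% ----------------------------------------------------------------------
% Optional sanity check, not part of the original exercise: a minimal
% sketch that compares the analytic gradient and Hessian above against
% central finite differences at a test point. The function name
% check_derivatives, the test point, and the step size h are assumptions
% made here for illustration. Since local functions in a function file
% are not callable from the command line, either call check_derivatives
% temporarily from inside rosenbrock_newton or move it (together with
% f, df, df2) into its own file.
function check_derivatives
x = [-1.2; 1];          % assumed test point (the default x0 above)
h = 1.0E-06;            % assumed finite-difference step
n = length(x);
g_fd = zeros(n, 1);
H_fd = zeros(n, n);
for j = 1:n
    e = zeros(n, 1); e(j) = 1;
    % Central difference of f gives column entries of the gradient
    g_fd(j) = (f(x + h*e) - f(x - h*e))/(2*h);
    % Central difference of the gradient gives column j of the Hessian
    H_fd(:, j) = (df(x + h*e) - df(x - h*e))/(2*h);
end
fprintf('gradient error (inf-norm): %e\n', norm(g_fd - df(x), inf));
fprintf('Hessian  error (inf-norm): %e\n', norm(H_fd - df2(x), inf));
end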