function demopt1(xinit)
%DEMOPT1 Demonstrate different optimisers on Rosenbrock's function.
%
% Description
% The four general optimisers (quasi-Newton, conjugate gradients,
% scaled conjugate gradients, and gradient descent) are applied to the
% minimisation of Rosenbrock's well known `banana' function. Each
% optimiser is run for at most 100 cycles, with stopping criteria of
% 1.0e-4 on the change in position and 1.0e-8 on the change in function
% value. At the end, the trajectory of each algorithm is shown on a
% contour plot of the function.
%
% DEMOPT1(XINIT) allows the user to specify a row vector with two
% columns as the starting point. The default is the point [-1 1]. Note
% that the contour plot has an x range of [-1.5, 1.5] and a y range of
% [-0.5, 2.1], so it is best to choose a starting point in the same
% region.
%
% See also
% CONJGRAD, GRADDESC, QUASINEW, SCG, ROSEN, ROSEGRAD
%

% Copyright (c) Ian T Nabney (1996-2001)

% Initialise start point for search
if nargin < 1
  xinit = [-1 1];   % Traditional start point
elseif any(size(xinit) ~= [1 2])
  xinit = [-1 1];   % Fall back to the traditional start point
end

% Find out if flops is available (i.e. pre-version 6 Matlab)
v = version;
if (str2num(strtok(v, '.')) >= 6)
  flops_works = logical(0);
else
  flops_works = logical(1);
end

% Set up options
options = foptions;   % Standard options
options(1) = -1;      % Turn off printing completely
options(3) = 1e-8;    % Tolerance in value of function
options(14) = 100;    % Max. 100 cycles of each algorithm
% options(2), the tolerance in the change of x, keeps its foptions
% default of 1.0e-4.

clc
disp('This demonstration compares the performance of four generic')
disp('optimisation routines when finding the minimum of Rosenbrock''s')
disp('function y = 100*(x2-x1^2)^2 + (1-x1)^2.')
disp(' ')
disp('The global minimum of this function is at [1 1].')
disp(['Each algorithm starts at the point [' num2str(xinit(1)) ...
    ' ' num2str(xinit(2)) '].'])
disp(' ')
disp('Press any key to continue.')
pause

% Generate a contour plot of the function
a = -1.5:.02:1.5;
b = -0.5:.02:2.1;
[A, B] = meshgrid(a, b);
Z = rosen([A(:), B(:)]);
Z = reshape(Z, length(b), length(a));
l = -1:6;
v = 2.^l;   % Contour levels (overwrites v; the version string is no longer needed)
fh1 = figure;
contour(a, b, Z, v)
title('Contour plot of Rosenbrock''s function')
hold on

clc
disp('We now use quasi-Newton, conjugate gradient, scaled conjugate')
disp('gradient, and gradient descent with line search algorithms')
disp('to find a local minimum of this function. Each algorithm is stopped')
disp('when 100 cycles have elapsed, or if the change in function value')
disp('is less than 1.0e-8 or the change in the input vector is less than')
disp('1.0e-4 in magnitude.')
disp(' ')
disp('Press any key to continue.')
pause

clc
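% Each of the four optimisers below is called through the same Netlab
% interface (a sketch of the shared calling convention, where optimiser
% stands for quasinew, conjgrad, scg or graddesc):
%
%   [x, options, errlog, pointlog] = optimiser('rosen', x, options, 'rosegrad');
%
% Here 'rosen' and 'rosegrad' name the objective and gradient functions,
% the returned options vector holds the final function value in
% options(8) and the numbers of function and gradient evaluations in
% options(10) and options(11), errlog records the function value at each
% cycle, and pointlog records the visited points (the start point plus
% one row per cycle), which is what is plotted on the contour map later.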
% Quasi-Newton optimisation
x = xinit;
flops(0)   % Reset the flop count (only meaningful in pre-version 6 Matlab)
[x, options, errlog, pointlog] = quasinew('rosen', x, options, 'rosegrad');
fprintf(1, 'For quasi-Newton method:\n')
fprintf(1, 'Final point is (%f, %f), value is %f\n', x(1), x(2), options(8))
fprintf(1, 'Number of function evaluations is %d\n', options(10))
fprintf(1, 'Number of gradient evaluations is %d\n', options(11))
if flops_works
  opt_flops = flops;
  fprintf(1, 'Number of floating point operations is %d\n', opt_flops)
end
fprintf(1, 'Number of cycles is %d\n', size(pointlog, 1) - 1);
disp(' ')

% Conjugate gradient optimisation
x = xinit;
flops(0)
[x, options, errlog2, pointlog2] = conjgrad('rosen', x, options, 'rosegrad');
fprintf(1, 'For conjugate gradient method:\n')
fprintf(1, 'Final point is (%f, %f), value is %f\n', x(1), x(2), options(8))
fprintf(1, 'Number of function evaluations is %d\n', options(10))
fprintf(1, 'Number of gradient evaluations is %d\n', options(11))
if flops_works
  opt_flops = flops;
  fprintf(1, 'Number of floating point operations is %d\n', opt_flops)
end
fprintf(1, 'Number of cycles is %d\n', size(pointlog2, 1) - 1);
disp(' ')

% Scaled conjugate gradient optimisation
x = xinit;
flops(0)
[x, options, errlog3, pointlog3] = scg('rosen', x, options, 'rosegrad');
fprintf(1, 'For scaled conjugate gradient method:\n')
fprintf(1, 'Final point is (%f, %f), value is %f\n', x(1), x(2), options(8))
fprintf(1, 'Number of function evaluations is %d\n', options(10))
fprintf(1, 'Number of gradient evaluations is %d\n', options(11))
if flops_works
  opt_flops = flops;
  fprintf(1, 'Number of floating point operations is %d\n', opt_flops)
end
fprintf(1, 'Number of cycles is %d\n', size(pointlog3, 1) - 1);
disp(' ')

% Gradient descent optimisation
x = xinit;
options(7) = 1;   % Use line minimisation
flops(0)
[x, options, errlog4, pointlog4] = graddesc('rosen', x, options, 'rosegrad');
fprintf(1, 'For gradient descent method:\n')
fprintf(1, 'Final point is (%f, %f), value is %f\n', x(1), x(2), options(8))
fprintf(1, 'Number of function evaluations is %d\n', options(10))
fprintf(1, 'Number of gradient evaluations is %d\n', options(11))
if flops_works
  opt_flops = flops;
  fprintf(1, 'Number of floating point operations is %d\n', opt_flops)
end
fprintf(1, 'Number of cycles is %d\n', size(pointlog4, 1) - 1);
disp(' ')
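% The gradient descent run above uses a line search (options(7) = 1).
% A plain fixed-step run would instead take steps scaled by the learning
% rate entry of the options vector; for Netlab's GRADDESC this should be
% options(18) (an assumption about the standard options conventions, not
% used by this demo). A sketch of such a run:
%
%   options(7) = 0;       % no line minimisation
%   options(18) = 0.001;  % illustrative learning rate, not tuned
%   [x, options, errlog5, pointlog5] = graddesc('rosen', xinit, options, 'rosegrad');
%
% On this narrow curved valley a fixed step typically makes even slower
% progress than the line-search version discussed below.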
disp('Note that gradient descent does not reach a local minimum in')
disp('100 cycles.')
disp(' ')
disp('On this problem, where the function is cheap to evaluate, the')
disp('computational effort is dominated by the algorithm overhead.')
disp('However, on more complex optimisation problems (such as those')
disp('involving neural networks), computational effort is dominated by')
disp('the number of function and gradient evaluations. Counting these,')
disp('we can rank the algorithms: quasi-Newton (the best), conjugate')
disp('gradient, scaled conjugate gradient, gradient descent (the worst).')
disp(' ')
disp('Press any key to continue.')
pause
clc
disp('We now plot the trajectory of search points for each algorithm')
disp('superimposed on the contour plot.')
disp(' ')
disp('Press any key to continue.')
pause

% Plot each search trajectory over the contour plot
plot(pointlog4(:,1), pointlog4(:,2), 'bd', 'MarkerSize', 6)
plot(pointlog3(:,1), pointlog3(:,2), 'mx', 'MarkerSize', 6, 'LineWidth', 2)
plot(pointlog(:,1), pointlog(:,2), 'k.', 'MarkerSize', 18)
plot(pointlog2(:,1), pointlog2(:,2), 'g+', 'MarkerSize', 6, 'LineWidth', 2)
lh = legend('Gradient Descent', 'Scaled Conjugate Gradients', ...
  'Quasi Newton', 'Conjugate Gradients');

hold off

clc
disp('Press any key to end.')
pause
close(fh1);
clear all;
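% For reference, minimal sketches of the objective and gradient that the
% Netlab routines ROSEN and ROSEGRAD are assumed to compute for this demo,
% evaluated row-wise over an N-by-2 matrix of points. These local copies
% use different names, are not called above, and simply document the
% quantities being minimised.

function y = rosen_sketch(x)
% Rosenbrock's function y = 100*(x2 - x1^2)^2 + (1 - x1)^2, one value per row of x.
y = 100*(x(:,2) - x(:,1).^2).^2 + (1 - x(:,1)).^2;

function g = rosegrad_sketch(x)
% Analytic gradient of Rosenbrock's function, one row [df/dx1, df/dx2] per row of x.
g = [-400*x(:,1).*(x(:,2) - x(:,1).^2) - 2*(1 - x(:,1)), ...
     200*(x(:,2) - x(:,1).^2)];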