Moved all the optimization stuff into its own page.

--HG--
rename : docs/docs/algorithms.xml => docs/docs/optimization.xml
extra : convert_revision : svn%3Afdd8eb12-d10e-0410-9acb-85c331704f74/trunk%403478
This commit is contained in:
Davis King 2010-02-21 17:32:12 +00:00
parent becd071a17
commit cc14e3f9eb
6 changed files with 472 additions and 421 deletions

View File

@ -13,7 +13,7 @@
This page documents library components that are all basically just implementations of
mathematical functions or algorithms that don't fit in any of the other pages
of the dlib documentation. So this includes things like checksums, cryptographic hashes,
optimization, sorting, etc.
sorting, etc.
</p>
</body>
@ -30,32 +30,6 @@
<item>running_stats</item>
<item>running_covariance</item>
<item>random_subset_selector</item>
<item nolink="true">
<name>Optimization</name>
<sub>
<item>derivative</item>
<item>negate_function</item>
<item>make_line_search_function</item>
<item>poly_min_extrap</item>
<item>lagrange_poly_min_extrap</item>
<item>line_search</item>
<item>find_min</item>
<item>find_min_single_variable</item>
<item>find_min_using_approximate_derivatives</item>
<item>find_min_bobyqa</item>
<item>solve_qp_using_smo</item>
<item>find_max</item>
<item>find_max_single_variable</item>
<item>find_max_using_approximate_derivatives</item>
<item>find_max_bobyqa</item>
<item>cg_search_strategy</item>
<item>bfgs_search_strategy</item>
<item>lbfgs_search_strategy</item>
<item>objective_delta_stop_strategy</item>
<item>gradient_norm_stop_strategy</item>
</sub>
</item>
<item nolink="true">
<name>Quantum Computing</name>
<sub>
@ -129,374 +103,7 @@
<components>
<!-- ************************************************************************* -->
<component>
<name>derivative</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
This is a function that takes another function as input and returns
a function object that numerically computes the derivative of the input function.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>negate_function</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
This is a function that takes another function as input and returns
a function object that computes the negation of the input function.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>make_line_search_function</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
This is a function that takes another function f(x) as input and returns
a function object l(z) = f(start + z*direction). It is useful for
turning multi-variable functions into single-variable functions for
use with the <a href="#line_search">line_search</a> routine.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>poly_min_extrap</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
This function finds the 3rd degree polynomial that interpolates a
set of points and returns the minimum of that polynomial.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>lagrange_poly_min_extrap</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
This function finds the second order polynomial that interpolates a
set of points and returns the minimum of that polynomial.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>line_search</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
Performs a gradient based line search on a given function and returns the input
that makes the function significantly smaller.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>cg_search_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_search_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for determining which direction
a <a href="#line_search">line search</a> should be carried out along. This particular object
is an implementation of the Polak-Ribiere conjugate gradient method
for determining this direction.
<p>
This method uses an amount of memory that is linear in the number
of variables to be optimized. So it is capable of handling problems
with a very large number of variables. However, it is generally
not as good as the L-BFGS algorithm (see the
<a href="#lbfgs_search_strategy">lbfgs_search_strategy</a> class).
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>bfgs_search_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_search_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for determining which direction
a <a href="#line_search">line search</a> should be carried out along. This particular object
is an implementation of the BFGS quasi-Newton method for determining
this direction.
<p>
This method uses an amount of memory that is quadratic in the number
of variables to be optimized. It is generally very effective but
if your problem has a very large number of variables then it isn't
appropriate. Instead you should try the <a href="#lbfgs_search_strategy">lbfgs_search_strategy</a>.
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>lbfgs_search_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_search_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for determining which direction
a <a href="#line_search">line search</a> should be carried out along. This particular object
is an implementation of the L-BFGS quasi-Newton method for determining
this direction.
<p>
This method uses an amount of memory that is linear in the number
of variables to be optimized. This makes it an excellent method
to use when an optimization problem has a large number of variables.
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>objective_delta_stop_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_stop_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for deciding if an optimization
algorithm should terminate. This particular object looks at the
change in the objective function from one iteration to the next and
bases its decision on how large this change is. If the change
is below a user given threshold then the search stops.
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>gradient_norm_stop_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_stop_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for deciding if an optimization
algorithm should terminate. This particular object looks at the
norm (i.e. the length) of the current gradient vector and
stops if it is smaller than a user given threshold.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_min</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
Performs an unconstrained minimization of a nonlinear function using
some search strategy (e.g. <a href="#bfgs_search_strategy">bfgs_search_strategy</a>).
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_min_single_variable</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
Performs a bound constrained minimization of a nonlinear function. The
function must be of a single variable. Derivatives are not required.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>solve_qp_using_smo</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_solve_qp_using_smo_abstract.h</spec_file>
<description>
This function solves the following quadratic program:
<pre>
Minimize: f(alpha) == 0.5*trans(alpha)*Q*alpha - trans(alpha)*b
subject to the following constraints:
sum(alpha) == C
min(alpha) >= 0
</pre>
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_min_bobyqa</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_bobyqa_abstract.h</spec_file>
<description>
This function defines the dlib interface to the BOBYQA software developed by M.J.D. Powell.
BOBYQA is a method for optimizing a function in the absence of derivative information.
Powell described it as a method that seeks the least value of a function of many
variables, by applying a trust region method that forms quadratic models by
interpolation. There is usually some freedom in the interpolation conditions,
which is taken up by minimizing the Frobenius norm of the change to the second
derivative of the model, beginning with the zero matrix. The values of the variables
are constrained by upper and lower bounds.
<p>
The following paper, published in 2009 by Powell, describes the
detailed working of the BOBYQA algorithm.
<blockquote>
The BOBYQA algorithm for bound constrained optimization
without derivatives by M.J.D. Powell
</blockquote>
</p>
<p>
Note that BOBYQA only works on functions of two or more variables. So if you need to perform
derivative-free optimization on a function of a single variable
then you should use the <a href="#find_min_single_variable">find_min_single_variable</a>
function.
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
<example>model_selection_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_max_bobyqa</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_bobyqa_abstract.h</spec_file>
<description>
This function is identical to the <a href="#find_min_bobyqa">find_min_bobyqa</a> routine
except that it negates the objective function before performing optimization.
Thus this function will attempt to find the maximizer of the objective rather than
the minimizer.
<p>
Note that BOBYQA only works on functions of two or more variables. So if you need to perform
derivative-free optimization on a function of a single variable
then you should use the <a href="#find_max_single_variable">find_max_single_variable</a>
function.
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
<example>model_selection_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_min_using_approximate_derivatives</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
Performs an unconstrained minimization of a nonlinear function using
some search strategy (e.g. <a href="#bfgs_search_strategy">bfgs_search_strategy</a>).
This version doesn't take a gradient function but instead numerically approximates
the gradient.
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_max</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
Performs an unconstrained maximization of a nonlinear function using
some search strategy (e.g. <a href="#bfgs_search_strategy">bfgs_search_strategy</a>).
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_max_single_variable</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
Performs a bound constrained maximization of a nonlinear function. The
function must be of a single variable. Derivatives are not required.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_max_using_approximate_derivatives</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
Performs an unconstrained maximization of a nonlinear function using
some search strategy (e.g. <a href="#bfgs_search_strategy">bfgs_search_strategy</a>).
This version doesn't take a gradient function but instead numerically approximates
the gradient.
</description>
</component>
<!-- ************************************************************************* -->
<component checked="true">
<name>bigint</name>
<file>dlib/bigint.h</file>

View File

@ -222,7 +222,7 @@
</ul><br/>
</li>
<li> <i>Introduction to Derivative-Free Optimization</i> by Conn, Scheinberg, and Vicente
<ul> If you want to understand algorithms like <a href="algorithms.html#find_min_bobyqa">BOBYQA</a>
<ul> If you want to understand algorithms like <a href="optimization.html#find_min_bobyqa">BOBYQA</a>
then this is a good recent book on the subject. Note that a book like <i>Practical Methods of Optimization</i>
is almost certainly a prerequisite for reading this book. As an aside, BOBYQA is not discussed in this book but
its predecessor, NEWUOA is.

View File

@ -109,11 +109,11 @@
<a href="dlib/matrix/matrix_utilities_abstract.h.html#trans">transpose</a>,
<a href="dlib/matrix/matrix_math_functions_abstract.h.html#sin">trig functions</a>, etc.</li>
<li>Unconstrained non-linear optimization algorithms using the
<a href="algorithms.html#cg_search_strategy">conjugate gradient</a>,
<a href="algorithms.html#bfgs_search_strategy">BFGS</a>, and
<a href="algorithms.html#lbfgs_search_strategy">L-BFGS</a> techniques</li>
<a href="optimization.html#cg_search_strategy">conjugate gradient</a>,
<a href="optimization.html#bfgs_search_strategy">BFGS</a>, and
<a href="optimization.html#lbfgs_search_strategy">L-BFGS</a> techniques</li>
<li>Box-constrained derivative-free optimization via the
<a href="algorithms.html#find_min_bobyqa">BOBYQA</a> algorithm</li>
<a href="optimization.html#find_min_bobyqa">BOBYQA</a> algorithm</li>
<li>A <a href="algorithms.html#bigint">big integer</a> object</li>
<li>A <a href="algorithms.html#rand">random number</a> object</li>
</ul>

View File

@ -10,6 +10,11 @@
<link>algorithms.html</link>
<chm_sub>algorithms.xml</chm_sub>
</item>
<item>
<name>Optimization</name>
<link>optimization.html</link>
<chm_sub>optimization.xml</chm_sub>
</item>
<item>
<name>Machine Learning</name>
<link>ml.html</link>
@ -31,7 +36,7 @@
<chm_sub>api.xml</chm_sub>
</item>
<item>
<name>Network</name>
<name>Networking</name>
<link>network.html</link>
<chm_sub>network.xml</chm_sub>
</item>

439
docs/docs/optimization.xml Normal file
View File

@ -0,0 +1,439 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<?xml-stylesheet type="text/xsl" href="stylesheet.xsl"?>
<doc>
<title>Optimization</title>
<!-- ************************************************************************* -->
<body>
<br/><br/>
<p>
This page documents library components that attempt to find the
minimum or maximum of a user supplied function.
</p>
</body>
<!-- ************************************************************************* -->
<menu width="150">
<top>
<section>
<name>Main Routines</name>
<item>find_min</item>
<item>find_min_single_variable</item>
<item>find_min_using_approximate_derivatives</item>
<item>find_min_bobyqa</item>
<item>solve_qp_using_smo</item>
<item>find_max</item>
<item>find_max_single_variable</item>
<item>find_max_using_approximate_derivatives</item>
<item>find_max_bobyqa</item>
</section>
<section>
<name>Strategies</name>
<item>cg_search_strategy</item>
<item>bfgs_search_strategy</item>
<item>lbfgs_search_strategy</item>
<item>objective_delta_stop_strategy</item>
<item>gradient_norm_stop_strategy</item>
</section>
<section>
<name>Helper Routines</name>
<item>derivative</item>
<item>negate_function</item>
<item>make_line_search_function</item>
<item>poly_min_extrap</item>
<item>lagrange_poly_min_extrap</item>
<item>line_search</item>
</section>
</top>
</menu>
<!-- ************************************************************************* -->
<!-- ************************************************************************* -->
<!-- ************************************************************************* -->
<components>
<!-- ************************************************************************* -->
<component>
<name>derivative</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
This is a function that takes another function as input and returns
a function object that numerically computes the derivative of the input function.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>negate_function</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
This is a function that takes another function as input and returns
a function object that computes the negation of the input function.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>make_line_search_function</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
This is a function that takes another function f(x) as input and returns
a function object l(z) = f(start + z*direction). It is useful for
turning multi-variable functions into single-variable functions for
use with the <a href="#line_search">line_search</a> routine.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>poly_min_extrap</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
This function finds the 3rd degree polynomial that interpolates a
set of points and returns the minimum of that polynomial.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>lagrange_poly_min_extrap</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
This function finds the second order polynomial that interpolates a
set of points and returns the minimum of that polynomial.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>line_search</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
Performs a gradient based line search on a given function and returns the input
that makes the function significantly smaller.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>cg_search_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_search_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for determining which direction
a <a href="#line_search">line search</a> should be carried out along. This particular object
is an implementation of the Polak-Ribiere conjugate gradient method
for determining this direction.
<p>
This method uses an amount of memory that is linear in the number
of variables to be optimized. So it is capable of handling problems
with a very large number of variables. However, it is generally
not as good as the L-BFGS algorithm (see the
<a href="#lbfgs_search_strategy">lbfgs_search_strategy</a> class).
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>bfgs_search_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_search_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for determining which direction
a <a href="#line_search">line search</a> should be carried out along. This particular object
is an implementation of the BFGS quasi-Newton method for determining
this direction.
<p>
This method uses an amount of memory that is quadratic in the number
of variables to be optimized. It is generally very effective but
if your problem has a very large number of variables then it isn't
appropriate. Instead you should try the <a href="#lbfgs_search_strategy">lbfgs_search_strategy</a>.
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>lbfgs_search_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_search_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for determining which direction
a <a href="#line_search">line search</a> should be carried out along. This particular object
is an implementation of the L-BFGS quasi-Newton method for determining
this direction.
<p>
This method uses an amount of memory that is linear in the number
of variables to be optimized. This makes it an excellent method
to use when an optimization problem has a large number of variables.
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>objective_delta_stop_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_stop_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for deciding if an optimization
algorithm should terminate. This particular object looks at the
change in the objective function from one iteration to the next and
bases its decision on how large this change is. If the change
is below a user given threshold then the search stops.
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>gradient_norm_stop_strategy</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_stop_strategies_abstract.h</spec_file>
<description>
This object represents a strategy for deciding if an optimization
algorithm should terminate. This particular object looks at the
norm (i.e. the length) of the current gradient vector and
stops if it is smaller than a user given threshold.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_min</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
Performs an unconstrained minimization of a nonlinear function using
some search strategy (e.g. <a href="#bfgs_search_strategy">bfgs_search_strategy</a>).
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_min_single_variable</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
Performs a bound constrained minimization of a nonlinear function. The
function must be of a single variable. Derivatives are not required.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>solve_qp_using_smo</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_solve_qp_using_smo_abstract.h</spec_file>
<description>
This function solves the following quadratic program:
<pre>
Minimize: f(alpha) == 0.5*trans(alpha)*Q*alpha - trans(alpha)*b
subject to the following constraints:
sum(alpha) == C
min(alpha) >= 0
</pre>
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_min_bobyqa</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_bobyqa_abstract.h</spec_file>
<description>
This function defines the dlib interface to the BOBYQA software developed by M.J.D. Powell.
BOBYQA is a method for optimizing a function in the absence of derivative information.
Powell described it as a method that seeks the least value of a function of many
variables, by applying a trust region method that forms quadratic models by
interpolation. There is usually some freedom in the interpolation conditions,
which is taken up by minimizing the Frobenius norm of the change to the second
derivative of the model, beginning with the zero matrix. The values of the variables
are constrained by upper and lower bounds.
<p>
The following paper, published in 2009 by Powell, describes the
detailed working of the BOBYQA algorithm.
<blockquote>
The BOBYQA algorithm for bound constrained optimization
without derivatives by M.J.D. Powell
</blockquote>
</p>
<p>
Note that BOBYQA only works on functions of two or more variables. So if you need to perform
derivative-free optimization on a function of a single variable
then you should use the <a href="#find_min_single_variable">find_min_single_variable</a>
function.
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
<example>model_selection_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_max_bobyqa</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_bobyqa_abstract.h</spec_file>
<description>
This function is identical to the <a href="#find_min_bobyqa">find_min_bobyqa</a> routine
except that it negates the objective function before performing optimization.
Thus this function will attempt to find the maximizer of the objective rather than
the minimizer.
<p>
Note that BOBYQA only works on functions of two or more variables. So if you need to perform
derivative-free optimization on a function of a single variable
then you should use the <a href="#find_max_single_variable">find_max_single_variable</a>
function.
</p>
</description>
<examples>
<example>optimization_ex.cpp.html</example>
<example>model_selection_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_min_using_approximate_derivatives</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
Performs an unconstrained minimization of a nonlinear function using
some search strategy (e.g. <a href="#bfgs_search_strategy">bfgs_search_strategy</a>).
This version doesn't take a gradient function but instead numerically approximates
the gradient.
</description>
<examples>
<example>optimization_ex.cpp.html</example>
</examples>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_max</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
Performs an unconstrained maximization of a nonlinear function using
some search strategy (e.g. <a href="#bfgs_search_strategy">bfgs_search_strategy</a>).
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_max_single_variable</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_line_search_abstract.h</spec_file>
<description>
Performs a bound constrained maximization of a nonlinear function. The
function must be of a single variable. Derivatives are not required.
</description>
</component>
<!-- ************************************************************************* -->
<component>
<name>find_max_using_approximate_derivatives</name>
<file>dlib/optimization.h</file>
<spec_file link="true">dlib/optimization/optimization_abstract.h</spec_file>
<description>
Performs an unconstrained maximization of a nonlinear function using
some search strategy (e.g. <a href="#bfgs_search_strategy">bfgs_search_strategy</a>).
This version doesn't take a gradient function but instead numerically approximates
the gradient.
</description>
</component>
<!-- ************************************************************************* -->
</components>
<!-- ************************************************************************* -->
</doc>

View File

@ -32,27 +32,27 @@
<term file="dlib/optimization/optimization_line_search_abstract.h.html" name="optimize_single_variable_failure"/>
<term file="dlib/optimization/optimization_bobyqa_abstract.h.html" name="bobyqa_failure"/>
<term file="algorithms.html" name="derivative"/>
<term file="algorithms.html" name="make_line_search_function"/>
<term file="algorithms.html" name="poly_min_extrap"/>
<term file="algorithms.html" name="lagrange_poly_min_extrap"/>
<term file="algorithms.html" name="line_search"/>
<term file="algorithms.html" name="find_min"/>
<term file="algorithms.html" name="find_min_single_variable"/>
<term file="algorithms.html" name="find_min_using_approximate_derivatives"/>
<term file="algorithms.html" name="find_min_bobyqa"/>
<term file="algorithms.html" name="solve_qp_using_smo"/>
<term link="algorithms.html#find_min_bobyqa" name="BOBYQA"/>
<term file="algorithms.html" name="find_max"/>
<term file="algorithms.html" name="find_max_single_variable"/>
<term file="algorithms.html" name="find_max_using_approximate_derivatives"/>
<term file="algorithms.html" name="find_max_bobyqa"/>
<term file="algorithms.html" name="objective_delta_stop_strategy"/>
<term file="algorithms.html" name="gradient_norm_stop_strategy"/>
<term file="algorithms.html" name="negate_function"/>
<term file="algorithms.html" name="cg_search_strategy"/>
<term file="algorithms.html" name="bfgs_search_strategy"/>
<term file="algorithms.html" name="lbfgs_search_strategy"/>
<term file="optimization.html" name="derivative"/>
<term file="optimization.html" name="make_line_search_function"/>
<term file="optimization.html" name="poly_min_extrap"/>
<term file="optimization.html" name="lagrange_poly_min_extrap"/>
<term file="optimization.html" name="line_search"/>
<term file="optimization.html" name="find_min"/>
<term file="optimization.html" name="find_min_single_variable"/>
<term file="optimization.html" name="find_min_using_approximate_derivatives"/>
<term file="optimization.html" name="find_min_bobyqa"/>
<term file="optimization.html" name="solve_qp_using_smo"/>
<term link="optimization.html#find_min_bobyqa" name="BOBYQA"/>
<term file="optimization.html" name="find_max"/>
<term file="optimization.html" name="find_max_single_variable"/>
<term file="optimization.html" name="find_max_using_approximate_derivatives"/>
<term file="optimization.html" name="find_max_bobyqa"/>
<term file="optimization.html" name="objective_delta_stop_strategy"/>
<term file="optimization.html" name="gradient_norm_stop_strategy"/>
<term file="optimization.html" name="negate_function"/>
<term file="optimization.html" name="cg_search_strategy"/>
<term file="optimization.html" name="bfgs_search_strategy"/>
<term file="optimization.html" name="lbfgs_search_strategy"/>
<term file="bayes.html" name="set_node_value"/>
<term file="bayes.html" name="node_value"/>