diff --git a/python_examples/global_optimization.py b/python_examples/global_optimization.py
index 820a262a0..3558cf8a8 100755
--- a/python_examples/global_optimization.py
+++ b/python_examples/global_optimization.py
@@ -3,8 +3,8 @@
 #
 #
 # This is an example illustrating the use of the global optimization routine,
-# find_max_global(), from the dlib C++ Library. This is a tool for finding the
-# inputs to a function that result in the function giving its maximal output.
+# find_min_global(), from the dlib C++ Library. This is a tool for finding the
+# inputs to a function that result in the function giving its minimal output.
 # This is a very useful tool for hyper parameter search when applying machine
 # learning methods. There are also many other applications for this kind of
 # general derivative free optimization. However, in this example program, we
@@ -30,17 +30,17 @@ import dlib
 from math import sin,cos,pi,exp,sqrt
 
 # This is a standard test function for these kinds of optimization problems.
-# It has a bunch of local maxima, with the global maximum resulting in
-# holder_table()==19.2085025679.
+# It has a bunch of local minima, with the global minimum resulting in
+# holder_table()==-19.2085025679.
 def holder_table(x0,x1):
-    return abs(sin(x0)*cos(x1)*exp(abs(1-sqrt(x0*x0+x1*x1)/pi)))
+    return -abs(sin(x0)*cos(x1)*exp(abs(1-sqrt(x0*x0+x1*x1)/pi)))
 
 # Find the optimal inputs to holder_table(). The print statements that follow
-# show that find_max_global() finds the optimal settings to high precision.
-x,y = dlib.find_max_global(holder_table,
+# show that find_min_global() finds the optimal settings to high precision.
+x,y = dlib.find_min_global(holder_table,
            [-10,-10],  # Lower bound constraints on x0 and x1 respectively
            [10,10],    # Upper bound constraints on x0 and x1 respectively
-           80)         # The number of times find_max_global() will call holder_table()
+           80)         # The number of times find_min_global() will call holder_table()
 
 print("optimal inputs: {}".format(x));
 print("optimal output: {}".format(y));
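
For reference, below is a minimal standalone sketch of the example as it reads after this patch, assuming dlib's Python bindings are installed (for example via pip install dlib). It only restates what the patched file does: the negated Holder table test function has its global minimum at about -19.2085, and find_min_global() is asked to locate it within the box [-10, 10] x [-10, 10] using 80 function evaluations.

    import dlib
    from math import sin, cos, pi, exp, sqrt

    def holder_table(x0, x1):
        # Negated Holder table test function; its global minimum is about -19.2085.
        return -abs(sin(x0) * cos(x1) * exp(abs(1 - sqrt(x0 * x0 + x1 * x1) / pi)))

    # Search the box [-10, 10] x [-10, 10], allowing 80 calls to holder_table().
    x, y = dlib.find_min_global(holder_table,
                                [-10, -10],  # lower bounds on x0 and x1
                                [10, 10],    # upper bounds on x0 and x1
                                80)          # number of holder_table() evaluations

    print("optimal inputs: {}".format(x))
    print("optimal output: {}".format(y))   # should be close to -19.2085025679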