1 | """ |
---|
2 | basinhopping: The basinhopping global optimization algorithm |
---|
3 | """ |
---|
4 | from __future__ import division, print_function, absolute_import |
---|
5 | |
---|
6 | import numpy as np |
---|
7 | from numpy import cos, sin |
---|
8 | import scipy.optimize |
---|
9 | import collections |
---|
10 | |
---|
11 | __all__ = ['basinhopping'] |
---|
12 | |
---|
13 | |
---|
class Storage(object):
    """
    Class used to store the lowest energy structure
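
    Examples
    --------
    A minimal sketch of the bookkeeping (the coordinates and function
    values below are illustrative only):

    >>> store = Storage(np.array([1.0]), 5.0)
    >>> store.update(np.array([0.5]), 3.0)    # lower f, so it is stored
    True
    >>> store.update(np.array([2.0]), 4.0)    # higher f, so it is ignored
    False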
    """
    def __init__(self, x, f):
        self._add(x, f)

    def _add(self, x, f):
        self.x = np.copy(x)
        self.f = f

    def update(self, x, f):
        if f < self.f:
            self._add(x, f)
            return True
        else:
            return False

    def get_lowest(self):
        return self.x, self.f


class BasinHoppingRunner(object):
    """This class implements the core of the basinhopping algorithm.

    Parameters
    ----------
    x0 : ndarray
        The starting coordinates.
    minimizer : callable
        The local minimizer, with signature ``result = minimizer(x)``.
        The return value is an `optimize.OptimizeResult` object.
    step_taking : callable
        This function displaces the coordinates randomly.  Signature should
        be ``x_new = step_taking(x)``.  Note that `x` may be modified
        in-place.
    accept_tests : list of callables
        Each test is passed the kwargs `f_new`, `x_new`, `f_old` and
        `x_old`.  These tests will be used to judge whether or not to accept
        the step.  The acceptable return values are True, False, or ``"force
        accept"``.  If the latter, then this will override any other tests in
        order to accept the step.  This can be used, for example, to
        forcefully escape from a local minimum that ``basinhopping`` is
        trapped in.
    disp : bool, optional
        Display status messages.
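
    Examples
    --------
    A minimal sketch of how ``basinhopping`` wires this class together,
    using the helper classes defined later in this module (the objective
    function here is illustrative only):

    >>> func = lambda x: x[0] ** 2
    >>> minimizer = MinimizerWrapper(scipy.optimize.minimize, func,
    ...                              method="L-BFGS-B")
    >>> runner = BasinHoppingRunner(np.array([1.0]), minimizer,
    ...                             RandomDisplacement(stepsize=0.5),
    ...                             [Metropolis(T=1.0)])
    >>> new_global_min = runner.one_cycle()
    >>> x_best, f_best = runner.storage.get_lowest()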
    """
    def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
        self.x = np.copy(x0)
        self.minimizer = minimizer
        self.step_taking = step_taking
        self.accept_tests = accept_tests
        self.disp = disp

        self.nstep = 0

        # initialize return object
        self.res = scipy.optimize.OptimizeResult()
        self.res.minimization_failures = 0

        # do initial minimization
        minres = minimizer(self.x)
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")
        self.x = np.copy(minres.x)
        self.energy = minres.fun
        if self.disp:
            print("basinhopping step %d: f %g" % (self.nstep, self.energy))

        # initialize storage class
        self.storage = Storage(self.x, self.energy)

        if hasattr(minres, "nfev"):
            self.res.nfev = minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev = minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev = minres.nhev

    def _monte_carlo_step(self):
        """Do one Monte Carlo iteration

        Randomly displace the coordinates, minimize, and decide whether
        or not to accept the new coordinates.
        """
        # Take a random step.  Make a copy of x because the step_taking
        # algorithm might change x in place
        x_after_step = np.copy(self.x)
        x_after_step = self.step_taking(x_after_step)

        # do a local minimization
        minres = self.minimizer(x_after_step)
        x_after_quench = minres.x
        energy_after_quench = minres.fun
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")
        if hasattr(minres, "nfev"):
            self.res.nfev += minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev += minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev += minres.nhev

        # accept the move based on self.accept_tests.  If any test is false,
        # then reject the step.  If any test returns the special string
        # 'force accept', accept the step regardless.  This can be used to
        # forcefully escape from a local minimum if normal basin hopping
        # steps are not sufficient.
        accept = True
        for test in self.accept_tests:
            testres = test(f_new=energy_after_quench, x_new=x_after_quench,
                           f_old=self.energy, x_old=self.x)
            if isinstance(testres, bool):
                if not testres:
                    accept = False
            elif isinstance(testres, str):
                if testres == "force accept":
                    accept = True
                    break
                else:
                    raise ValueError("accept test must return bool or string "
                                     "'force accept'. Type is %s"
                                     % type(testres))
            else:
                raise ValueError("accept test must return bool or string "
                                 "'force accept'. Type is %s" % type(testres))

        # Report the result of the acceptance test to the take step class.
        # This is for adaptive step taking
        if hasattr(self.step_taking, "report"):
            self.step_taking.report(accept, f_new=energy_after_quench,
                                    x_new=x_after_quench, f_old=self.energy,
                                    x_old=self.x)

        return x_after_quench, energy_after_quench, accept

    def one_cycle(self):
        """Do one cycle of the basinhopping algorithm
        """
        self.nstep += 1
        new_global_min = False

        xtrial, energy_trial, accept = self._monte_carlo_step()

        if accept:
            self.energy = energy_trial
            self.x = np.copy(xtrial)
            new_global_min = self.storage.update(self.x, self.energy)

        # print some information
        if self.disp:
            self.print_report(energy_trial, accept)
            if new_global_min:
                print("found new global minimum on step %d with function"
                      " value %g" % (self.nstep, self.energy))

        # save some variables as BasinHoppingRunner attributes
        self.xtrial = xtrial
        self.energy_trial = energy_trial
        self.accept = accept

        return new_global_min

    def print_report(self, energy_trial, accept):
        """print a status update"""
        xlowest, energy_lowest = self.storage.get_lowest()
        print("basinhopping step %d: f %g trial_f %g accepted %d "
              "lowest_f %g" % (self.nstep, self.energy, energy_trial,
                               accept, energy_lowest))


class AdaptiveStepsize(object):
    """
    Class to implement adaptive stepsize.

    This class wraps the step taking class and modifies the stepsize to
    ensure the true acceptance rate is as close as possible to the target.

    Parameters
    ----------
    takestep : callable
        The step taking routine.  Must contain modifiable attribute
        takestep.stepsize
    accept_rate : float, optional
        The target step acceptance rate
    interval : int, optional
        Interval for how often to update the stepsize
    factor : float, optional
        The step size is multiplied or divided by this factor upon each
        update.
    verbose : bool, optional
        Print information about each update
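
    Examples
    --------
    A minimal sketch of the adaptive update, using this module's
    ``RandomDisplacement``; the alternating acceptance pattern below is
    artificial:

    >>> displace = RandomDisplacement(stepsize=0.5)
    >>> takestep = AdaptiveStepsize(displace, interval=10, verbose=False)
    >>> x = np.zeros(2)
    >>> for i in range(10):
    ...     x = takestep(x)
    ...     takestep.report(i % 2 == 0)    # pretend every other step succeeded
    >>> round(displace.stepsize, 2)    # 50% acceptance: stepsize was reduced
    0.45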
    """
    def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9,
                 verbose=True):
        self.takestep = takestep
        self.target_accept_rate = accept_rate
        self.interval = interval
        self.factor = factor
        self.verbose = verbose

        self.nstep = 0
        self.nstep_tot = 0
        self.naccept = 0

    def __call__(self, x):
        return self.take_step(x)

    def _adjust_step_size(self):
        old_stepsize = self.takestep.stepsize
        accept_rate = float(self.naccept) / self.nstep
        if accept_rate > self.target_accept_rate:
            # We're accepting too many steps.  This generally means we're
            # trapped in a basin.  Take bigger steps.
            self.takestep.stepsize /= self.factor
        else:
            # We're not accepting enough steps.  Take smaller steps.
            self.takestep.stepsize *= self.factor
        if self.verbose:
            print("adaptive stepsize: acceptance rate %f target %f new "
                  "stepsize %g old stepsize %g" % (accept_rate,
                  self.target_accept_rate, self.takestep.stepsize,
                  old_stepsize))

    def take_step(self, x):
        self.nstep += 1
        self.nstep_tot += 1
        if self.nstep % self.interval == 0:
            self._adjust_step_size()
        return self.takestep(x)

    def report(self, accept, **kwargs):
        "called by basinhopping to report the result of the step"
        if accept:
            self.naccept += 1


class RandomDisplacement(object):
    """
    Add a random displacement of maximum size ``stepsize`` to the coordinates

    Calling this updates ``x`` in place.
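
    Examples
    --------
    A small sketch:

    >>> np.random.seed(0)
    >>> displace = RandomDisplacement(stepsize=0.5)
    >>> x = displace(np.zeros(3))
    >>> bool(np.all(np.abs(x) <= 0.5))    # each coordinate moved by <= 0.5
    True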
    """
    def __init__(self, stepsize=0.5):
        self.stepsize = stepsize

    def __call__(self, x):
        x += np.random.uniform(-self.stepsize, self.stepsize, np.shape(x))
        return x


class MinimizerWrapper(object):
    """
    Wrap a minimizer function as a minimizer class
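
    Examples
    --------
    A minimal sketch (the quadratic objective is illustrative only):

    >>> wrapped = MinimizerWrapper(scipy.optimize.minimize,
    ...                            func=lambda x: (x[0] - 2.0) ** 2,
    ...                            method="L-BFGS-B")
    >>> res = wrapped(np.array([0.0]))
    >>> bool(abs(res.x[0] - 2.0) < 1e-4)    # converged to the minimum at x=2
    True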
    """
    def __init__(self, minimizer, func=None, **kwargs):
        self.minimizer = minimizer
        self.func = func
        self.kwargs = kwargs

    def __call__(self, x0):
        if self.func is None:
            return self.minimizer(x0, **self.kwargs)
        else:
            return self.minimizer(self.func, x0, **self.kwargs)


class Metropolis(object):
    """
    Metropolis acceptance criterion
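
    Examples
    --------
    A downhill move (``f_new < f_old``) is always accepted; an uphill move
    is accepted with probability ``exp(-(f_new - f_old) / T)``.  A quick,
    illustrative sketch:

    >>> met = Metropolis(T=1.0)
    >>> met(f_new=0.0, f_old=1.0)    # downhill, so always accepted
    True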
    """
    def __init__(self, T):
        self.beta = 1.0 / T

    def accept_reject(self, energy_new, energy_old):
        w = min(1.0, np.exp(-(energy_new - energy_old) * self.beta))
        rand = np.random.rand()
        return w >= rand

    def __call__(self, **kwargs):
        """
        f_new and f_old are mandatory in kwargs
        """
        return bool(self.accept_reject(kwargs["f_new"],
                    kwargs["f_old"]))


def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
                 minimizer_kwargs=None, take_step=None, accept_test=None,
                 callback=None, interval=50, disp=False, niter_success=None):
305 | """ |
---|
306 | Find the global minimum of a function using the basin-hopping algorithm |
---|
307 | |
---|
308 | .. versionadded:: 0.12.0 |
---|
309 | |
---|
310 | Parameters |
---|
311 | ---------- |
---|
312 | func : callable ``f(x, *args)`` |
---|
313 | Function to be optimized. ``args`` can be passed as an optional item |
---|
314 | in the dict ``minimizer_kwargs`` |
---|
315 | x0 : ndarray |
---|
316 | Initial guess. |
---|
317 | niter : integer, optional |
---|
318 | The number of basin hopping iterations |
---|
319 | T : float, optional |
---|
320 | The "temperature" parameter for the accept or reject criterion. Higher |
---|
321 | "temperatures" mean that larger jumps in function value will be |
---|
322 | accepted. For best results ``T`` should be comparable to the |
---|
323 | separation |
---|
324 | (in function value) between local minima. |
---|
325 | stepsize : float, optional |
---|
326 | initial step size for use in the random displacement. |
---|
327 | minimizer_kwargs : dict, optional |
---|
328 | Extra keyword arguments to be passed to the minimizer |
---|
329 | ``scipy.optimize.minimize()`` Some important options could be: |
---|
330 | method : str |
---|
331 | The minimization method (e.g. ``"L-BFGS-B"``) |
---|
332 | args : tuple |
---|
333 | Extra arguments passed to the objective function (``func``) and |
---|
334 | its derivatives (Jacobian, Hessian). |
---|
335 | |
---|
336 | take_step : callable ``take_step(x)``, optional |
---|
337 | Replace the default step taking routine with this routine. The default |
---|
338 | step taking routine is a random displacement of the coordinates, but |
---|
339 | other step taking algorithms may be better for some systems. |
---|
340 | ``take_step`` can optionally have the attribute ``take_step.stepsize``. |
---|
341 | If this attribute exists, then ``basinhopping`` will adjust |
---|
342 | ``take_step.stepsize`` in order to try to optimize the global minimum |
---|
343 | search. |
---|
344 | accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=fold, x_old=x_old)``, optional |
---|
345 | Define a test which will be used to judge whether or not to accept the |
---|
346 | step. This will be used in addition to the Metropolis test based on |
---|
347 | "temperature" ``T``. The acceptable return values are True, |
---|
348 | False, or ``"force accept"``. If the latter, then this will |
---|
349 | override any other tests in order to accept the step. This can be |
---|
350 | used, for example, to forcefully escape from a local minimum that |
---|
351 | ``basinhopping`` is trapped in. |
---|
352 | callback : callable, ``callback(x, f, accept)``, optional |
---|
353 | A callback function which will be called for all minimum found. ``x`` |
---|
354 | and ``f`` are the coordinates and function value of the trial minima, |
---|
355 | and ``accept`` is whether or not that minima was accepted. This can be |
---|
356 | used, for example, to save the lowest N minima found. Also, |
---|
357 | ``callback`` can be used to specify a user defined stop criterion by |
---|
358 | optionally returning True to stop the ``basinhopping`` routine. |
---|
359 | interval : integer, optional |
---|
360 | interval for how often to update the ``stepsize`` |
---|
361 | disp : bool, optional |
---|
362 | Set to True to print status messages |
---|
363 | niter_success : integer, optional |
---|
364 | Stop the run if the global minimum candidate remains the same for this |
---|
365 | number of iterations. |
---|
366 | |
---|
367 | |
---|
    Returns
    -------
    res : OptimizeResult
        The optimization result represented as an ``OptimizeResult`` object.
        Important attributes are: ``x`` the solution array, ``fun`` the value
        of the function at the solution, and ``message`` which describes the
        cause of the termination.  See `OptimizeResult` for a description of
        other attributes.

    See Also
    --------
    minimize :
        The local minimization function called once for each basinhopping
        step.  ``minimizer_kwargs`` is passed to this routine.

    Notes
    -----
    Basin-hopping is a stochastic algorithm which attempts to find the global
    minimum of a smooth scalar function of one or more variables [1]_ [2]_
    [3]_ [4]_.  The algorithm in its current form was described by David
    Wales and Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/.

    The algorithm is iterative with each cycle composed of the following
    features

    1) random perturbation of the coordinates

    2) local minimization

    3) accept or reject the new coordinates based on the minimized function
       value

    The acceptance test used here is the Metropolis criterion of standard
    Monte Carlo algorithms, although there are many other possibilities [3]_.

    This global minimization method has been shown to be extremely efficient
    for a wide variety of problems in physics and chemistry.  It is
    particularly useful when the function has many minima separated by large
    barriers.  See the Cambridge Cluster Database
    http://www-wales.ch.cam.ac.uk/CCD.html for databases of molecular systems
    that have been optimized primarily using basin-hopping.  This database
    includes minimization problems exceeding 300 degrees of freedom.

    See the free software program GMIN (http://www-wales.ch.cam.ac.uk/GMIN)
    for a Fortran implementation of basin-hopping.  This implementation has
    many different variations of the procedure described above, including
    more advanced step taking algorithms and alternate acceptance criteria.

    For stochastic global optimization there is no way to determine if the
    true global minimum has actually been found.  Instead, as a consistency
    check, the algorithm can be run from a number of different random
    starting points to ensure the lowest minimum found in each example has
    converged to the global minimum.  For this reason ``basinhopping`` will
    by default simply run for the number of iterations ``niter`` and return
    the lowest minimum found.  It is left to the user to ensure that this is
    in fact the global minimum.

    Choosing ``stepsize``:  This is a crucial parameter in ``basinhopping``
    and depends on the problem being solved.  Ideally it should be comparable
    to the typical separation between local minima of the function being
    optimized.  ``basinhopping`` will, by default, adjust ``stepsize`` to
    find an optimal value, but this may take many iterations.  You will get
    quicker results if you set a sensible value for ``stepsize``.

    Choosing ``T``: The parameter ``T`` is the temperature used in the
    Metropolis criterion.  Basinhopping steps are accepted with probability
    ``1`` if ``func(xnew) < func(xold)``, or otherwise with probability::

        exp( -(func(xnew) - func(xold)) / T )

    So, for best results, ``T`` should be comparable to the typical
    difference in function values between local minima.

    References
    ----------
    .. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University
        Press, Cambridge, UK.
    .. [2] Wales, D. J. and Doye, J. P. K., Global Optimization by
        Basin-Hopping and the Lowest Energy Structures of Lennard-Jones
        Clusters Containing up to 110 Atoms.  Journal of Physical Chemistry
        A, 1997, 101, 5111.
    .. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to
        the multiple-minima problem in protein folding, Proc. Natl. Acad.
        Sci. USA, 1987, 84, 6611.
    .. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters,
        crystals, and biomolecules, Science, 1999, 285, 1368.

    Examples
    --------
    The following example is a one-dimensional minimization problem, with
    many local minima superimposed on a parabola.

    >>> func = lambda x: cos(14.5 * x - 0.3) + (x + 0.2) * x
    >>> x0 = [1.]

    Basinhopping, internally, uses a local minimization algorithm.  We will
    use the parameter ``minimizer_kwargs`` to tell basinhopping which
    algorithm to use and how to set up that minimizer.  This parameter will
    be passed to ``scipy.optimize.minimize()``.

    >>> minimizer_kwargs = {"method": "BFGS"}
    >>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = %.4f, f(x) = %.4f" % (ret.x, ret.fun))
    global minimum: x = -0.1951, f(x) = -1.0009

    Next consider a two-dimensional minimization problem.  Also, this time
    we will use gradient information to significantly speed up the search.

    >>> def func2d(x):
    ...     f = cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] +
    ...                                                         0.2) * x[0]
    ...     df = np.zeros(2)
    ...     df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
    ...     df[1] = 2. * x[1] + 0.2
    ...     return f, df

    We'll also use a different local minimization algorithm.  Also we must
    tell the minimizer that our function returns both energy and gradient
    (Jacobian).

    >>> minimizer_kwargs = {"method": "L-BFGS-B", "jac": True}
    >>> x0 = [1.0, 1.0]
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0],
    ...                                                          ret.x[1],
    ...                                                          ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109


    Here is an example using a custom step taking routine.  Imagine you want
    the first coordinate to take larger steps than the rest of the
    coordinates.  This can be implemented like so:

    >>> class MyTakeStep(object):
    ...     def __init__(self, stepsize=0.5):
    ...         self.stepsize = stepsize
    ...     def __call__(self, x):
    ...         s = self.stepsize
    ...         x[0] += np.random.uniform(-2.*s, 2.*s)
    ...         x[1:] += np.random.uniform(-s, s, x[1:].shape)
    ...         return x

    Since ``MyTakeStep.stepsize`` exists, basinhopping will adjust the
    magnitude of ``stepsize`` to optimize the search.  We'll use the same
    2-D function as before

    >>> mytakestep = MyTakeStep()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200, take_step=mytakestep)
    >>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0],
    ...                                                          ret.x[1],
    ...                                                          ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109


    Now let's do an example using a custom callback function which prints
    the value of every minimum found

    >>> def print_fun(x, f, accepted):
    ...     print("at minimum %.4f accepted %d" % (f, int(accepted)))

    We'll run it for only 10 basinhopping steps this time.

    >>> np.random.seed(1)
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, callback=print_fun)
    at minimum 0.4159 accepted 1
    at minimum -0.9073 accepted 1
    at minimum -0.1021 accepted 1
    at minimum -0.1021 accepted 1
    at minimum 0.9102 accepted 1
    at minimum 0.9102 accepted 1
    at minimum 2.2945 accepted 0
    at minimum -0.1021 accepted 1
    at minimum -1.0109 accepted 1
    at minimum -1.0109 accepted 1


    The minimum at -1.0109 is actually the global minimum, found already on
    the 9th iteration.

    Now let's implement bounds on the problem using a custom ``accept_test``:

    >>> class MyBounds(object):
    ...     def __init__(self, xmax=[1.1, 1.1], xmin=[-1.1, -1.1]):
    ...         self.xmax = np.array(xmax)
    ...         self.xmin = np.array(xmin)
    ...     def __call__(self, **kwargs):
    ...         x = kwargs["x_new"]
    ...         tmax = bool(np.all(x <= self.xmax))
    ...         tmin = bool(np.all(x >= self.xmin))
    ...         return tmax and tmin

    >>> mybounds = MyBounds()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, accept_test=mybounds)

    """
    x0 = np.array(x0)

    # set up minimizer
    if minimizer_kwargs is None:
        minimizer_kwargs = dict()
    wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func,
                                         **minimizer_kwargs)

    # set up step taking algorithm
    if take_step is not None:
        if not isinstance(take_step, collections.Callable):
            raise TypeError("take_step must be callable")
        # if take_step.stepsize exists then use AdaptiveStepsize to control
        # take_step.stepsize
        if hasattr(take_step, "stepsize"):
            take_step_wrapped = AdaptiveStepsize(take_step, interval=interval,
                                                 verbose=disp)
        else:
            take_step_wrapped = take_step
    else:
        # use default
        displace = RandomDisplacement(stepsize=stepsize)
        take_step_wrapped = AdaptiveStepsize(displace, interval=interval,
                                             verbose=disp)

    # set up accept tests
    if accept_test is not None:
        if not isinstance(accept_test, collections.Callable):
            raise TypeError("accept_test must be callable")
        accept_tests = [accept_test]
    else:
        accept_tests = []
    # use default
    metropolis = Metropolis(T)
    accept_tests.append(metropolis)

    if niter_success is None:
        niter_success = niter + 2

    bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped,
                            accept_tests, disp=disp)

    # start main iteration loop
    count = 0
    message = ["requested number of basinhopping iterations completed"
               " successfully"]
    for i in range(niter):
        new_global_min = bh.one_cycle()

        if isinstance(callback, collections.Callable):
            # should we pass a copy of x?
            val = callback(bh.xtrial, bh.energy_trial, bh.accept)
            if val is not None:
                if val:
                    message = ["callback function requested stop early by"
                               " returning True"]
                    break

        count += 1
        if new_global_min:
            count = 0
        elif count > niter_success:
            message = ["success condition satisfied"]
            break

    # prepare return object
    lowest = bh.storage.get_lowest()
    res = bh.res
    res.x = np.copy(lowest[0])
    res.fun = lowest[1]
    res.message = message
    res.nit = i + 1
    return res


def _test_func2d_nograd(x):
    f = (cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
         + 1.010876184442655)
    return f


def _test_func2d(x):
    f = (cos(14.5 * x[0] - 0.3) + (x[0] + 0.2) * x[0] + cos(14.5 * x[1] -
         0.3) + (x[1] + 0.2) * x[1] + x[0] * x[1] + 1.963879482144252)
    df = np.zeros(2)
    df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 + x[1]
    df[1] = -14.5 * sin(14.5 * x[1] - 0.3) + 2. * x[1] + 0.2 + x[0]
    return f, df


if __name__ == "__main__":
    print("\n\nminimize a 2d function without gradient")
    # minimum expected at ~[-0.195, -0.1]
    kwargs = {"method": "L-BFGS-B"}
    x0 = np.array([1.0, 1.])
    ret = basinhopping(_test_func2d_nograd, x0, minimizer_kwargs=kwargs,
                       niter=200, disp=False)
    print("minimum expected at func([-0.195, -0.1]) = 0.0")
    print(ret)

    print("\n\ntry a harder 2d problem")
    kwargs = {"method": "L-BFGS-B", "jac": True}
    x0 = np.array([1.0, 1.0])
    ret = basinhopping(_test_func2d, x0, minimizer_kwargs=kwargs, niter=200,
                       disp=False)
    print("minimum expected at func([-0.19415263, -0.19415263]) = 0")
    print(ret)
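
    # A third demo (illustrative sketch): the same hard 2d problem, with the
    # search restricted to a box via a custom accept test, mirroring the
    # ``MyBounds`` example in the basinhopping docstring.  The bound 1.1 is
    # an arbitrary choice.
    print("\n\nsame 2d problem with bounds enforced by a custom accept test")

    def mybounds(**kwargs):
        # reject any trial minimum outside [-1.1, 1.1] in either coordinate
        return bool(np.all(np.abs(kwargs["x_new"]) <= 1.1))

    ret = basinhopping(_test_func2d, x0, minimizer_kwargs=kwargs, niter=200,
                       accept_test=mybounds, disp=False)
    print(ret)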