source: trunk/basinhopping.py @ 3042

Last change on this file since 3042 was 3042, checked in by vondreele, 8 years ago

add scipy license to basinhopping
MCSA plot bug fix

1"""
2basinhopping: The basinhopping global optimization algorithm
3"""
4''' Scipy license:
5    Copyright (c) 2001, 2002 Enthought, Inc.
6All rights reserved.
7
8Copyright (c) 2003-2016 SciPy Developers.
9All rights reserved.
10
11Redistribution and use in source and binary forms, with or without
12modification, are permitted provided that the following conditions are met:
13
14  a. Redistributions of source code must retain the above copyright notice,
15     this list of conditions and the following disclaimer.
16  b. Redistributions in binary form must reproduce the above copyright
17     notice, this list of conditions and the following disclaimer in the
18     documentation and/or other materials provided with the distribution.
19  c. Neither the name of Enthought nor the names of the SciPy Developers
20     may be used to endorse or promote products derived from this software
21     without specific prior written permission.
22
23
24THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
25AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
26IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
27ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS
28BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
29OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
30SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
31INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
32CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
33ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
34THE POSSIBILITY OF SUCH DAMAGE.
35'''
from __future__ import division, print_function, absolute_import

import numpy as np
from numpy import cos, sin
import scipy.optimize

__all__ = ['basinhopping']


class Storage(object):
    """
    Class used to store the lowest energy structure
    """
    def __init__(self, x, f):
        self._add(x, f)

    def _add(self, x, f):
        self.x = np.copy(x)
        self.f = f

    def update(self, x, f):
        if f < self.f:
            self._add(x, f)
            return True
        else:
            return False

    def get_lowest(self):
        return self.x, self.f

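
# A minimal usage sketch (a hypothetical helper, not used elsewhere in this
# module): Storage keeps only the best (lowest f) point seen so far, and
# update() reports whether the candidate replaced it.
def _demo_storage():
    store = Storage(np.array([1.0]), 5.0)
    assert store.update(np.array([0.5]), 3.0)        # lower f: kept
    assert not store.update(np.array([2.0]), 9.0)    # higher f: ignored
    return store.get_lowest()                        # (array([0.5]), 3.0)
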

class BasinHoppingRunner(object):
    """This class implements the core of the basinhopping algorithm.

    x0 : ndarray
        The starting coordinates.
    minimizer : callable
        The local minimizer, with signature ``result = minimizer(x)``.
        The return value is an `optimize.OptimizeResult` object.
    step_taking : callable
        This function displaces the coordinates randomly.  Signature should
        be ``x_new = step_taking(x)``.  Note that `x` may be modified in-place.
    accept_tests : list of callables
        Each test is passed the kwargs `f_new`, `x_new`, `f_old` and `x_old`.
        These tests are used to judge whether or not to accept the step.  The
        acceptable return values are True, False, or ``"force accept"``.  The
        latter overrides any other test in order to accept the step.  This
        can be used, for example, to forcefully escape from a local minimum
        that ``basinhopping`` is trapped in.
    disp : bool, optional
        Display status messages.

    """
    def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
        self.x = np.copy(x0)
        self.minimizer = minimizer
        self.step_taking = step_taking
        self.accept_tests = accept_tests
        self.disp = disp

        self.nstep = 0

        # initialize return object
        self.res = scipy.optimize.OptimizeResult()
        self.res.minimization_failures = 0

        # do initial minimization
        minres = minimizer(self.x)
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")
        self.x = np.copy(minres.x)
        self.energy = minres.fun
        if self.disp:
            print("basinhopping step %d: f %g" % (self.nstep, self.energy))

        # initialize storage class
        self.storage = Storage(self.x, self.energy)

        if hasattr(minres, "nfev"):
            self.res.nfev = minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev = minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev = minres.nhev

    def _monte_carlo_step(self):
        """Do one Monte Carlo iteration

        Randomly displace the coordinates, minimize, and decide whether
        or not to accept the new coordinates.
        """
        # Take a random step.  Make a copy of x because the step_taking
        # algorithm might change x in place
        x_after_step = np.copy(self.x)
        x_after_step = self.step_taking(x_after_step)

        # do a local minimization
        minres = self.minimizer(x_after_step)
        x_after_quench = minres.x
        energy_after_quench = minres.fun
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")
        if hasattr(minres, "nfev"):
            self.res.nfev += minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev += minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev += minres.nhev

        # Accept the move based on self.accept_tests.  If any test is false,
        # then reject the step.  If any test returns the special string
        # 'force accept', accept the step regardless.  This can be used to
        # forcefully escape from a local minimum if normal basin hopping
        # steps are not sufficient.
        accept = True
        for test in self.accept_tests:
            testres = test(f_new=energy_after_quench, x_new=x_after_quench,
                           f_old=self.energy, x_old=self.x)
            if isinstance(testres, bool):
                if not testres:
                    accept = False
            elif isinstance(testres, str):
                if testres == "force accept":
                    accept = True
                    break
                else:
                    raise ValueError("accept test must return bool or the "
                                     "string 'force accept', got %s"
                                     % type(testres))
            else:
                raise ValueError("accept test must return bool or the "
                                 "string 'force accept', got %s"
                                 % type(testres))

        # Report the result of the acceptance test to the take step class.
        # This is for adaptive step taking.
        if hasattr(self.step_taking, "report"):
            self.step_taking.report(accept, f_new=energy_after_quench,
                                    x_new=x_after_quench, f_old=self.energy,
                                    x_old=self.x)

        return x_after_quench, energy_after_quench, accept

    def one_cycle(self):
        """Do one cycle of the basinhopping algorithm
        """
        self.nstep += 1
        new_global_min = False

        xtrial, energy_trial, accept = self._monte_carlo_step()

        if accept:
            self.energy = energy_trial
            self.x = np.copy(xtrial)
            new_global_min = self.storage.update(self.x, self.energy)

        # print some information
        if self.disp:
            self.print_report(energy_trial, accept)
            if new_global_min:
                print("found new global minimum on step %d with function"
                      " value %g" % (self.nstep, self.energy))

        # save some variables as BasinHoppingRunner attributes
        self.xtrial = xtrial
        self.energy_trial = energy_trial
        self.accept = accept

        return new_global_min

    def print_report(self, energy_trial, accept):
        """print a status update"""
        xlowest, energy_lowest = self.storage.get_lowest()
        print("basinhopping step %d: f %g trial_f %g accepted %d "
              " lowest_f %g" % (self.nstep, self.energy, energy_trial,
                                accept, energy_lowest))

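# A minimal sketch (a hypothetical helper, not used elsewhere in this module)
# of a custom accept test following the protocol described above: return
# True or False to vote on the step, or the string "force accept" to
# override every other test.  The escape threshold is made up for
# illustration.
def _example_accept_test(f_new=None, x_new=None, f_old=None, x_old=None):
    if f_old is not None and f_old > 100.0:
        # trapped at a very poor minimum: take the step no matter what
        return "force accept"
    # otherwise vote to accept only downhill (or equal) moves
    return bool(f_new <= f_old)

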
class AdaptiveStepsize(object):
    """
    Class to implement adaptive stepsize.

    This class wraps the step taking class and modifies the stepsize to
    ensure the true acceptance rate is as close as possible to the target.

    Parameters
    ----------
    takestep : callable
        The step taking routine.  Must contain a modifiable attribute
        ``takestep.stepsize``
    accept_rate : float, optional
        The target step acceptance rate
    interval : int, optional
        Interval for how often to update the stepsize
    factor : float, optional
        The step size is multiplied or divided by this factor upon each
        update.
    verbose : bool, optional
        Print information about each update

    """
    def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9,
                 verbose=True):
        self.takestep = takestep
        self.target_accept_rate = accept_rate
        self.interval = interval
        self.factor = factor
        self.verbose = verbose

        self.nstep = 0
        self.nstep_tot = 0
        self.naccept = 0

    def __call__(self, x):
        return self.take_step(x)

    def _adjust_step_size(self):
        old_stepsize = self.takestep.stepsize
        accept_rate = float(self.naccept) / self.nstep
        if accept_rate > self.target_accept_rate:
            # We're accepting too many steps.  This generally means we're
            # trapped in a basin.  Take bigger steps.
            self.takestep.stepsize /= self.factor
        else:
            # We're not accepting enough steps.  Take smaller steps.
            self.takestep.stepsize *= self.factor
        if self.verbose:
            print("adaptive stepsize: acceptance rate %f target %f new "
                  "stepsize %g old stepsize %g" % (accept_rate,
                  self.target_accept_rate, self.takestep.stepsize,
                  old_stepsize))

    def take_step(self, x):
        self.nstep += 1
        self.nstep_tot += 1
        if self.nstep % self.interval == 0:
            self._adjust_step_size()
        return self.takestep(x)

    def report(self, accept, **kwargs):
        """called by basinhopping to report the result of the step"""
        if accept:
            self.naccept += 1

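# A minimal sketch (a hypothetical helper, not used elsewhere in this module)
# of the feedback rule implemented by AdaptiveStepsize: with the default
# factor of 0.9, an acceptance rate above the target grows the stepsize by
# 1/0.9 and a rate below it shrinks the stepsize by 0.9.
def _demo_adaptive_stepsize(naccept=30, interval=50):
    takestep = RandomDisplacement(stepsize=0.5)
    adaptive = AdaptiveStepsize(takestep, accept_rate=0.5, interval=interval,
                                verbose=False)
    for i in range(interval):
        # simulate naccept accepted steps out of interval trial steps
        adaptive.report(i < naccept)
        adaptive(np.zeros(2))
    # with 30/50 accepted, the rate (0.6) exceeds the 0.5 target, so the
    # stepsize has grown from 0.5 to 0.5 / 0.9 ~= 0.556
    return takestep.stepsize
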

class RandomDisplacement(object):
    """
    Add a random displacement of maximum size ``stepsize`` to each
    coordinate.

    Calling this updates ``x`` in place.
    """
    def __init__(self, stepsize=0.5):
        self.stepsize = stepsize

    def __call__(self, x):
        x += np.random.uniform(-self.stepsize, self.stepsize, np.shape(x))
        return x


class MinimizerWrapper(object):
    """
    Wrap a minimizer function as a minimizer class
    """
    def __init__(self, minimizer, func=None, **kwargs):
        self.minimizer = minimizer
        self.func = func
        self.kwargs = kwargs

    def __call__(self, x0):
        if self.func is None:
            return self.minimizer(x0, **self.kwargs)
        else:
            return self.minimizer(self.func, x0, **self.kwargs)

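# A minimal sketch (a hypothetical helper, not used elsewhere in this module)
# showing how MinimizerWrapper binds the objective function so that the
# basinhopping machinery can call the local minimizer with a single argument.
def _demo_minimizer_wrapper():
    def func(x):
        return float((x[0] - 1.0) ** 2)
    wrapped = MinimizerWrapper(scipy.optimize.minimize, func,
                               method="L-BFGS-B")
    return wrapped(np.array([3.0]))   # OptimizeResult with x close to [1.0]

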
class Metropolis(object):
    """
    Metropolis acceptance criterion
    """
    def __init__(self, T):
        self.beta = 1.0 / T

    def accept_reject(self, energy_new, energy_old):
        w = min(1.0, np.exp(-(energy_new - energy_old) * self.beta))
        rand = np.random.rand()
        return w >= rand

    def __call__(self, **kwargs):
        """
        f_new and f_old are mandatory in kwargs
        """
        return bool(self.accept_reject(kwargs["f_new"],
                    kwargs["f_old"]))

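# A minimal sketch (a hypothetical helper, not used elsewhere in this module)
# of the Metropolis criterion above: downhill steps are always accepted,
# while an uphill step of size dE is accepted with probability exp(-dE / T).
def _demo_metropolis():
    met = Metropolis(T=1.0)
    assert met(f_new=0.0, f_old=1.0)   # downhill: w = 1, always accepted
    # uphill by 1.0: accepted with probability exp(-1) ~= 0.37
    return met(f_new=2.0, f_old=1.0)

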
def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
                 minimizer_kwargs=None, take_step=None, accept_test=None,
                 callback=None, interval=50, disp=False, niter_success=None):
    """
    Find the global minimum of a function using the basin-hopping algorithm

    .. versionadded:: 0.12.0

    Parameters
    ----------
    func : callable ``f(x, *args)``
        Function to be optimized.  ``args`` can be passed as an optional item
        in the dict ``minimizer_kwargs``.
    x0 : ndarray
        Initial guess.
    niter : integer, optional
        The number of basin hopping iterations.
    T : float, optional
        The "temperature" parameter for the accept or reject criterion.
        Higher "temperatures" mean that larger jumps in function value will
        be accepted.  For best results ``T`` should be comparable to the
        separation (in function value) between local minima.
    stepsize : float, optional
        Initial step size for use in the random displacement.
    minimizer_kwargs : dict, optional
        Extra keyword arguments to be passed to the minimizer
        ``scipy.optimize.minimize()``.  Some important options could be:

            method : str
                The minimization method (e.g. ``"L-BFGS-B"``)
            args : tuple
                Extra arguments passed to the objective function (``func``)
                and its derivatives (Jacobian, Hessian).

    take_step : callable ``take_step(x)``, optional
        Replace the default step taking routine with this routine.  The
        default step taking routine is a random displacement of the
        coordinates, but other step taking algorithms may be better for some
        systems.  ``take_step`` can optionally have the attribute
        ``take_step.stepsize``.  If this attribute exists, then
        ``basinhopping`` will adjust ``take_step.stepsize`` in order to try
        to optimize the global minimum search.
    accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=f_old, x_old=x_old)``, optional
        Define a test which will be used to judge whether or not to accept
        the step.  This will be used in addition to the Metropolis test based
        on "temperature" ``T``.  The acceptable return values are True,
        False, or ``"force accept"``.  The latter overrides any other test in
        order to accept the step.  This can be used, for example, to
        forcefully escape from a local minimum that ``basinhopping`` is
        trapped in.
    callback : callable, ``callback(x, f, fmin, accept)``, optional
        A callback function which will be called for each minimum found.
        ``x`` and ``f`` are the coordinates and function value of the trial
        minimum, ``fmin`` is the lowest function value found so far, and
        ``accept`` is whether or not that minimum was accepted.  This can be
        used, for example, to save the lowest N minima found.  Also,
        ``callback`` can be used to specify a user defined stop criterion by
        optionally returning True to stop the ``basinhopping`` routine.
    interval : integer, optional
        Interval for how often to update the ``stepsize``.
    disp : bool, optional
        Set to True to print status messages.
    niter_success : integer, optional
        Stop the run if the global minimum candidate remains the same for
        this number of iterations.

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as an ``OptimizeResult`` object.
        Important attributes are: ``x`` the solution array, ``fun`` the value
        of the function at the solution, and ``message`` which describes the
        cause of the termination.  See `OptimizeResult` for a description of
        other attributes.

    See Also
    --------
    minimize :
        The local minimization function called once for each basinhopping
        step.  ``minimizer_kwargs`` is passed to this routine.

    Notes
    -----
    Basin-hopping is a stochastic algorithm which attempts to find the global
    minimum of a smooth scalar function of one or more variables [1]_ [2]_
    [3]_ [4]_.  The algorithm in its current form was described by David
    Wales and Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/.

    The algorithm is iterative, with each cycle composed of the following
    steps

    1) random perturbation of the coordinates

    2) local minimization

    3) accept or reject the new coordinates based on the minimized function
       value

    The acceptance test used here is the Metropolis criterion of standard
    Monte Carlo algorithms, although there are many other possibilities [3]_.

    This global minimization method has been shown to be extremely efficient
    for a wide variety of problems in physics and chemistry.  It is
    particularly useful when the function has many minima separated by large
    barriers. See the Cambridge Cluster Database
    http://www-wales.ch.cam.ac.uk/CCD.html for databases of molecular systems
    that have been optimized primarily using basin-hopping.  This database
    includes minimization problems exceeding 300 degrees of freedom.

    See the free software program GMIN (http://www-wales.ch.cam.ac.uk/GMIN)
    for a Fortran implementation of basin-hopping.  This implementation has
    many different variations of the procedure described above, including
    more advanced step taking algorithms and alternate acceptance criteria.

    For stochastic global optimization there is no way to determine if the
    true global minimum has actually been found.  Instead, as a consistency
    check, the algorithm can be run from a number of different random
    starting points to ensure the lowest minimum found in each example has
    converged to the global minimum.  For this reason ``basinhopping`` will
    by default simply run for the number of iterations ``niter`` and return
    the lowest minimum found.  It is left to the user to ensure that this is
    in fact the global minimum.

    Choosing ``stepsize``:  This is a crucial parameter in ``basinhopping``
    and depends on the problem being solved.  Ideally it should be comparable
    to the typical separation between local minima of the function being
    optimized.  ``basinhopping`` will, by default, adjust ``stepsize`` to
    find an optimal value, but this may take many iterations.  You will get
    quicker results if you set a sensible value for ``stepsize``.

    Choosing ``T``: The parameter ``T`` is the temperature used in the
    Metropolis criterion.  Basinhopping steps are accepted with probability
    ``1`` if ``func(xnew) < func(xold)``, or otherwise with probability::

        exp( -(func(xnew) - func(xold)) / T )

    So, for best results, ``T`` should be comparable to the typical
    difference in function values between local minima.  For example, with
    ``T = 1.0`` a trial minimum that lies higher by 1.0 is accepted with
    probability ``exp(-1)``, roughly 0.37.

    References
    ----------
    .. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University
        Press, Cambridge, UK.
    .. [2] Wales, D. J. and Doye, J. P. K., Global Optimization by
        Basin-Hopping and the Lowest Energy Structures of Lennard-Jones
        Clusters Containing up to 110 Atoms, Journal of Physical Chemistry A,
        1997, 101, 5111.
    .. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to
        the multiple-minima problem in protein folding, Proc. Natl. Acad.
        Sci. USA, 1987, 84, 6611.
    .. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters,
        crystals, and biomolecules, Science, 1999, 285, 1368.

    Examples
    --------
    The following example is a one-dimensional minimization problem, with
    many local minima superimposed on a parabola.

    >>> func = lambda x: cos(14.5 * x - 0.3) + (x + 0.2) * x
    >>> x0 = [1.]

    Basinhopping, internally, uses a local minimization algorithm.  We will
    use the parameter ``minimizer_kwargs`` to tell basinhopping which
    algorithm to use and how to set up that minimizer.  This parameter will
    be passed to ``scipy.optimize.minimize()``.

    >>> minimizer_kwargs = {"method": "BFGS"}
    >>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = %.4f, f(x) = %.4f" % (ret.x, ret.fun))
    global minimum: x = -0.1951, f(x) = -1.0009

    Next consider a two-dimensional minimization problem. Also, this time we
    will use gradient information to significantly speed up the search.

    >>> def func2d(x):
    ...     f = cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] +
    ...                                                         0.2) * x[0]
    ...     df = np.zeros(2)
    ...     df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
    ...     df[1] = 2. * x[1] + 0.2
    ...     return f, df

    We'll also use a different local minimization algorithm.  Also, we must
    tell the minimizer that our function returns both energy and gradient
    (Jacobian).

    >>> minimizer_kwargs = {"method": "L-BFGS-B", "jac": True}
    >>> x0 = [1.0, 1.0]
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0],
    ...                                                          ret.x[1],
    ...                                                          ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109


    Here is an example using a custom step taking routine.  Imagine you want
    the first coordinate to take larger steps than the rest of the
    coordinates.  This can be implemented like so:

    >>> class MyTakeStep(object):
    ...    def __init__(self, stepsize=0.5):
    ...        self.stepsize = stepsize
    ...    def __call__(self, x):
    ...        s = self.stepsize
    ...        x[0] += np.random.uniform(-2.*s, 2.*s)
    ...        x[1:] += np.random.uniform(-s, s, x[1:].shape)
    ...        return x

    Since ``MyTakeStep.stepsize`` exists, basinhopping will adjust the
    magnitude of ``stepsize`` to optimize the search.  We'll use the same
    2-D function as before.

    >>> mytakestep = MyTakeStep()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200, take_step=mytakestep)
    >>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0],
    ...                                                          ret.x[1],
    ...                                                          ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109


    Now let's do an example using a custom callback function which prints
    the value of every minimum found.  Note that in this version the
    callback also receives ``fmin``, the lowest function value found so far.

    >>> def print_fun(x, f, fmin, accepted):
    ...         print("at minimum %.4f accepted %d" % (f, int(accepted)))

    We'll run it for only 10 basinhopping steps this time.

    >>> np.random.seed(1)
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, callback=print_fun)
    at minimum 0.4159 accepted 1
    at minimum -0.9073 accepted 1
    at minimum -0.1021 accepted 1
    at minimum -0.1021 accepted 1
    at minimum 0.9102 accepted 1
    at minimum 0.9102 accepted 1
    at minimum 2.2945 accepted 0
    at minimum -0.1021 accepted 1
    at minimum -1.0109 accepted 1
    at minimum -1.0109 accepted 1


    The minimum at -1.0109 is actually the global minimum, found already on
    the 9th iteration.

    Now let's implement bounds on the problem using a custom ``accept_test``:

    >>> class MyBounds(object):
    ...     def __init__(self, xmax=[1.1,1.1], xmin=[-1.1,-1.1]):
    ...         self.xmax = np.array(xmax)
    ...         self.xmin = np.array(xmin)
    ...     def __call__(self, **kwargs):
    ...         x = kwargs["x_new"]
    ...         tmax = bool(np.all(x <= self.xmax))
    ...         tmin = bool(np.all(x >= self.xmin))
    ...         return tmax and tmin

    >>> mybounds = MyBounds()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, accept_test=mybounds)

    """
    x0 = np.array(x0)

    # set up minimizer
    if minimizer_kwargs is None:
        minimizer_kwargs = dict()
    wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func,
                                         **minimizer_kwargs)

    # set up step taking algorithm
    if take_step is not None:
        if not callable(take_step):
            raise TypeError("take_step must be callable")
        # if take_step.stepsize exists then use AdaptiveStepsize to control
        # take_step.stepsize
        if hasattr(take_step, "stepsize"):
            take_step_wrapped = AdaptiveStepsize(take_step, interval=interval,
                                                 verbose=disp)
        else:
            take_step_wrapped = take_step
    else:
        # use default
        displace = RandomDisplacement(stepsize=stepsize)
        take_step_wrapped = AdaptiveStepsize(displace, interval=interval,
                                             verbose=disp)

    # set up accept tests
    if accept_test is not None:
        if not callable(accept_test):
            raise TypeError("accept_test must be callable")
        accept_tests = [accept_test]
    else:
        accept_tests = []
    # the Metropolis test is always appended
    metropolis = Metropolis(T)
    accept_tests.append(metropolis)

    if niter_success is None:
        niter_success = niter + 2

    bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped,
                            accept_tests, disp=disp)

    # start main iteration loop
    count = 0
    message = ["requested number of basinhopping iterations completed"
               " successfully"]
    for i in range(niter):
        new_global_min = bh.one_cycle()

        if callable(callback):
            # should we pass a copy of x?
            val = callback(bh.xtrial, bh.energy_trial,
                           bh.storage.get_lowest()[1], bh.accept)
            if val is not None:
                if val:
                    message = ["callback function requested stop early by"
                               " returning True"]
                    break

        count += 1
        if new_global_min:
            count = 0
        elif count > niter_success:
            message = ["success condition satisfied"]
            break

    # prepare return object
    lowest = bh.storage.get_lowest()
    res = bh.res
    res.x = np.copy(lowest[0])
    res.fun = lowest[1]
    res.message = message
    res.nit = i + 1
    return res


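# A minimal sketch (a hypothetical helper, not used elsewhere in this module)
# of the four-argument callback signature used by this version of
# basinhopping: callback(x, f, fmin, accept).  Returning True stops the run
# early; the cutoff below is made up for illustration.
def _demo_stop_callback(x, f, fmin, accept):
    # stop once the lowest value found so far is close enough to zero
    # (the known global minimum of the test functions below)
    return fmin < 1e-3
# It could be used as, e.g.:
#     basinhopping(_test_func2d_nograd, [1.0, 1.0],
#                  minimizer_kwargs={"method": "L-BFGS-B"},
#                  niter=200, callback=_demo_stop_callback)

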
def _test_func2d_nograd(x):
    f = (cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
         + 1.010876184442655)
    return f


def _test_func2d(x):
    f = (cos(14.5 * x[0] - 0.3) + (x[0] + 0.2) * x[0] + cos(14.5 * x[1] -
         0.3) + (x[1] + 0.2) * x[1] + x[0] * x[1] + 1.963879482144252)
    df = np.zeros(2)
    df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 + x[1]
    df[1] = -14.5 * sin(14.5 * x[1] - 0.3) + 2. * x[1] + 0.2 + x[0]
    return f, df


if __name__ == "__main__":
    print("\n\nminimize a 2d function without gradient")
    # minimum expected at ~[-0.195, -0.1]
    kwargs = {"method": "L-BFGS-B"}
    x0 = np.array([1.0, 1.])
    # a single local minimization from x0 (result intentionally unused)
    scipy.optimize.minimize(_test_func2d_nograd, x0, **kwargs)
    ret = basinhopping(_test_func2d_nograd, x0, minimizer_kwargs=kwargs,
                       niter=200, disp=False)
    print("minimum expected at func([-0.195, -0.1]) = 0.0")
    print(ret)

    print("\n\ntry a harder 2d problem")
    kwargs = {"method": "L-BFGS-B", "jac": True}
    x0 = np.array([1.0, 1.0])
    ret = basinhopping(_test_func2d, x0, minimizer_kwargs=kwargs, niter=200,
                       disp=False)
    print("minimum expected at ~func([-0.19415263, -0.19415263]) = 0")
    print(ret)