source: branch/2frame/basinhopping.py @ 2930

Last change on this file since 2930 was 2930, checked in by vondreele, 4 years ago

tweaks of basinhopping
replace more TextCtrl with ValidatedTxtCtrl in phaseGUI

  • Property svn:eol-style set to native
  • Property svn:keywords set to Date Author Revision URL Id
File size: 25.5 KB
Line 
1"""
2basinhopping: The basinhopping global optimization algorithm
3"""
4from __future__ import division, print_function, absolute_import
5
6import numpy as np
7from numpy import cos, sin
8import scipy.optimize
9import collections
10
11__all__ = ['basinhopping']
12
13
class Storage(object):
    """Record the lowest-energy minimum encountered so far.

    Coordinates are stored as a copy so that later in-place modification
    of the caller's array cannot corrupt the stored record.
    """
    def __init__(self, x, f):
        # Seed the record with the first minimum supplied.
        self._record(x, f)

    def _record(self, x, f):
        # Copy defensively: the step-taking routines mutate arrays in place.
        self.x = np.copy(x)
        self.f = f

    def update(self, x, f):
        """Replace the stored minimum if ``f`` is strictly lower.

        Returns True when a new record was set, False otherwise.
        """
        if f < self.f:
            self._record(x, f)
            return True
        return False

    def get_lowest(self):
        """Return ``(x, f)`` for the best minimum found."""
        return self.x, self.f
34
35
class BasinHoppingRunner(object):
    """This class implements the core of the basinhopping algorithm.

    Parameters
    ----------
    x0 : ndarray
        The starting coordinates.
    minimizer : callable
        The local minimizer, with signature ``result = minimizer(x)``.
        The return value is an `optimize.OptimizeResult` object.
    step_taking : callable
        This function displaces the coordinates randomly.  Signature should
        be ``x_new = step_taking(x)``.  Note that `x` may be modified in-place.
    accept_tests : list of callables
        To each test is passed the kwargs `f_new`, `x_new`, `f_old` and
        `x_old`.  These tests will be used to judge whether or not to accept
        the step.  The acceptable return values are True, False, or ``"force
        accept"``.  If the latter, then this will override any other tests in
        order to accept the step.  This can be used, for example, to forcefully
        escape from a local minimum that ``basinhopping`` is trapped in.
    disp : bool, optional
        Display status messages.

    """
    def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
        self.x = np.copy(x0)
        self.minimizer = minimizer
        self.step_taking = step_taking
        self.accept_tests = accept_tests
        self.disp = disp

        # number of basinhopping cycles completed so far
        self.nstep = 0

        # initialize return object
        self.res = scipy.optimize.OptimizeResult()
        self.res.minimization_failures = 0

        # do initial minimization
        minres = minimizer(self.x)
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")
        self.x = np.copy(minres.x)
        self.energy = minres.fun
        if self.disp:
            print("basinhopping step %d: f %g" % (self.nstep, self.energy))

        # initialize storage class
        self.storage = Storage(self.x, self.energy)

        # accumulate evaluation counters only when the chosen local
        # minimizer actually reports them
        if hasattr(minres, "nfev"):
            self.res.nfev = minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev = minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev = minres.nhev

    def _monte_carlo_step(self):
        """Do one monte carlo iteration

        Randomly displace the coordinates, minimize, and decide whether
        or not to accept the new coordinates.

        Returns
        -------
        (x_after_quench, energy_after_quench, accept) : tuple
            The locally-minimized trial coordinates, their function value,
            and whether the move was accepted.
        """
        # Take a random step.  Make a copy of x because the step_taking
        # algorithm might change x in place
        x_after_step = np.copy(self.x)
        x_after_step = self.step_taking(x_after_step)

        # do a local minimization
        minres = self.minimizer(x_after_step)
        x_after_quench = minres.x
        energy_after_quench = minres.fun
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")
        if hasattr(minres, "nfev"):
            self.res.nfev += minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev += minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev += minres.nhev

        # accept the move based on self.accept_tests. If any test is false,
        # than reject the step.  If any test returns the special value, the
        # string 'force accept', accept the step regardless.  This can be used
        # to forcefully escape from a local minima if normal basin hopping
        # steps are not sufficient.
        accept = True
        for test in self.accept_tests:
            testres = test(f_new=energy_after_quench, x_new=x_after_quench,
                           f_old=self.energy, x_old=self.x)
            if isinstance(testres, bool):
                if not testres:
                    accept = False
            elif isinstance(testres, str):
                if testres == "force accept":
                    # "force accept" short-circuits all remaining tests
                    accept = True
                    break
                else:
                    raise ValueError("accept test must return bool or string "
                                     "'force accept'. Type is", type(testres))
            else:
                raise ValueError("accept test must return bool or string "
                                 "'force accept'. Type is", type(testres))

        # Report the result of the acceptance test to the take step class.
        # This is for adaptive step taking
        if hasattr(self.step_taking, "report"):
            self.step_taking.report(accept, f_new=energy_after_quench,
                                    x_new=x_after_quench, f_old=self.energy,
                                    x_old=self.x)

        return x_after_quench, energy_after_quench, accept

    def one_cycle(self):
        """Do one cycle of the basinhopping algorithm

        Returns True when the cycle produced a new global minimum.
        """
        self.nstep += 1
        new_global_min = False

        xtrial, energy_trial, accept = self._monte_carlo_step()

        if accept:
            self.energy = energy_trial
            self.x = np.copy(xtrial)
            new_global_min = self.storage.update(self.x, self.energy)

        # print some information
        if self.disp:
            self.print_report(energy_trial, accept)
            if new_global_min:
                print("found new global minimum on step %d with function"
                      " value %g" % (self.nstep, self.energy))

        # save some variables as BasinHoppingRunner attributes
        # (read back by ``basinhopping`` when invoking the user callback)
        self.xtrial = xtrial
        self.energy_trial = energy_trial
        self.accept = accept

        return new_global_min

    def print_report(self, energy_trial, accept):
        """print a status update"""
        xlowest, energy_lowest = self.storage.get_lowest()
        print("basinhopping step %d: f %g trial_f %g accepted %d "
              " lowest_f %g" % (self.nstep, self.energy, energy_trial,
                                accept, energy_lowest))
183
184
class AdaptiveStepsize(object):
    """
    Wrap a step-taking callable and tune its ``stepsize`` attribute.

    Every ``interval`` calls, the observed acceptance rate is compared
    with the target and the wrapped routine's stepsize is scaled by
    ``factor`` (or its inverse) to push the rate toward the target.

    Parameters
    ----------
    takestep : callable
        The step taking routine.  Must contain modifiable attribute
        takestep.stepsize
    accept_rate : float, optional
        The target step acceptance rate
    interval : int, optional
        Interval for how often to update the stepsize
    factor : float, optional
        The step size is multiplied or divided by this factor upon each
        update.
    verbose : bool, optional
        Print information about each update

    """
    def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9,
                 verbose=True):
        self.takestep = takestep
        self.target_accept_rate = accept_rate
        self.interval = interval
        self.factor = factor
        self.verbose = verbose
        # counters: steps taken, total steps, and accepted steps
        self.nstep = 0
        self.nstep_tot = 0
        self.naccept = 0

    def __call__(self, x):
        # delegate so the wrapper is itself a valid step-taking callable
        return self.take_step(x)

    def _adjust_step_size(self):
        previous = self.takestep.stepsize
        rate = float(self.naccept) / self.nstep
        if rate > self.target_accept_rate:
            # Accepting too many steps usually means we are trapped inside
            # one basin -- enlarge the step to escape it.
            self.takestep.stepsize /= self.factor
        else:
            # Not accepting enough steps -- shrink the step.
            self.takestep.stepsize *= self.factor
        if self.verbose:
            print("adaptive stepsize: acceptance rate %f target %f new "
                  "stepsize %g old stepsize %g" % (rate,
                  self.target_accept_rate, self.takestep.stepsize,
                  previous))

    def take_step(self, x):
        self.nstep += 1
        self.nstep_tot += 1
        # re-tune the stepsize once every ``interval`` calls
        if self.nstep % self.interval == 0:
            self._adjust_step_size()
        return self.takestep(x)

    def report(self, accept, **kwargs):
        "called by basinhopping to report the result of the step"
        if accept:
            self.naccept += 1
250
251
class RandomDisplacement(object):
    """
    Default step-taking routine: perturb every coordinate by a uniform
    random amount drawn from ``[-stepsize, stepsize]``.

    The array ``x`` is modified in place and also returned.
    """
    def __init__(self, stepsize=0.5):
        self.stepsize = stepsize

    def __call__(self, x):
        s = self.stepsize
        x += np.random.uniform(-s, s, np.shape(x))
        return x
264
265
class MinimizerWrapper(object):
    """
    Present a minimizer function as a single-argument callable.

    If ``func`` is supplied it is forwarded as the first positional
    argument (the ``scipy.optimize.minimize`` convention); otherwise the
    minimizer is assumed to already know its objective.
    """
    def __init__(self, minimizer, func=None, **kwargs):
        self.minimizer = minimizer
        self.func = func
        self.kwargs = kwargs

    def __call__(self, x0):
        if self.func is not None:
            return self.minimizer(self.func, x0, **self.kwargs)
        return self.minimizer(x0, **self.kwargs)
280
281
class Metropolis(object):
    """
    Metropolis acceptance criterion.

    Accept a trial minimum with probability
    ``min(1, exp(-(f_new - f_old) / T))``.

    Parameters
    ----------
    T : float
        The "temperature"; larger values make larger uphill moves
        acceptable.
    """
    def __init__(self, T):
        self.beta = 1.0 / T

    def accept_reject(self, energy_new, energy_old):
        """Return True if the move ``energy_old -> energy_new`` is accepted."""
        # Cap the exponent at 0 so np.exp cannot overflow (RuntimeWarning,
        # inf) for strongly downhill moves; mathematically
        # min(1, exp(p)) == exp(min(0, p)).
        p = min(0.0, -(energy_new - energy_old) * self.beta)
        w = np.exp(p)
        rand = np.random.rand()
        return w >= rand

    def __call__(self, **kwargs):
        """
        f_new and f_old are mandatory in kwargs
        """
        return bool(self.accept_reject(kwargs["f_new"],
                    kwargs["f_old"]))
300
301
def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
                 minimizer_kwargs=None, take_step=None, accept_test=None,
                 callback=None, interval=50, disp=False, niter_success=None):
    """
    Find the global minimum of a function using the basin-hopping algorithm

    .. versionadded:: 0.12.0

    Parameters
    ----------
    func : callable ``f(x, *args)``
        Function to be optimized.  ``args`` can be passed as an optional item
        in the dict ``minimizer_kwargs``
    x0 : ndarray
        Initial guess.
    niter : integer, optional
        The number of basin hopping iterations
    T : float, optional
        The "temperature" parameter for the accept or reject criterion.  Higher
        "temperatures" mean that larger jumps in function value will be
        accepted.  For best results ``T`` should be comparable to the
        separation
        (in function value) between local minima.
    stepsize : float, optional
        initial step size for use in the random displacement.
    minimizer_kwargs : dict, optional
        Extra keyword arguments to be passed to the minimizer
        ``scipy.optimize.minimize()`` Some important options could be:
            method : str
                The minimization method (e.g. ``"L-BFGS-B"``)
            args : tuple
                Extra arguments passed to the objective function (``func``) and
                its derivatives (Jacobian, Hessian).

    take_step : callable ``take_step(x)``, optional
        Replace the default step taking routine with this routine.  The default
        step taking routine is a random displacement of the coordinates, but
        other step taking algorithms may be better for some systems.
        ``take_step`` can optionally have the attribute ``take_step.stepsize``.
        If this attribute exists, then ``basinhopping`` will adjust
        ``take_step.stepsize`` in order to try to optimize the global minimum
        search.
    accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=fold, x_old=x_old)``, optional
        Define a test which will be used to judge whether or not to accept the
        step.  This will be used in addition to the Metropolis test based on
        "temperature" ``T``.  The acceptable return values are True,
        False, or ``"force accept"``.  If the latter, then this will
        override any other tests in order to accept the step.  This can be
        used, for example, to forcefully escape from a local minimum that
        ``basinhopping`` is trapped in.
    callback : callable, ``callback(x, f, fmin, accept)``, optional
        A callback function which will be called for all minimum found.  ``x``
        and ``f`` are the coordinates and function value of the trial minima,
        and ``accept`` is whether or not that minima was accepted.  'fmin' is the lowest f found.
        This can be used, for example, to save the lowest N minima found.  Also,
        ``callback`` can be used to specify a user defined stop criterion by
        optionally returning True to stop the ``basinhopping`` routine.
    interval : integer, optional
        interval for how often to update the ``stepsize``
    disp : bool, optional
        Set to True to print status messages
    niter_success : integer, optional
        Stop the run if the global minimum candidate remains the same for this
        number of iterations.

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as a ``OptimizeResult`` object.  Important
        attributes are: ``x`` the solution array, ``fun`` the value of the
        function at the solution, and ``message`` which describes the cause of
        the termination. See `OptimizeResult` for a description of other attributes.

    See Also
    --------
    minimize :
        The local minimization function called once for each basinhopping step.
        ``minimizer_kwargs`` is passed to this routine.

    Notes
    -----
    Basin-hopping is a stochastic algorithm which attempts to find the global
    minimum of a smooth scalar function of one or more variables [1]_ [2]_ [3]_
    [4]_.  The algorithm in its current form was described by David Wales and
    Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/.

    The algorithm is iterative with each cycle composed of the following
    features

    1) random perturbation of the coordinates

    2) local minimization

    3) accept or reject the new coordinates based on the minimized function
       value

    The acceptance test used here is the Metropolis criterion of standard Monte
    Carlo algorithms, although there are many other possibilities [3]_.

    This global minimization method has been shown to be extremely efficient
    for a wide variety of problems in physics and chemistry.  It is
    particularly useful when the function has many minima separated by large
    barriers. See the Cambridge Cluster Database
    http://www-wales.ch.cam.ac.uk/CCD.html for databases of molecular systems
    that have been optimized primarily using basin-hopping.  This database
    includes minimization problems exceeding 300 degrees of freedom.

    See the free software program GMIN (http://www-wales.ch.cam.ac.uk/GMIN) for
    a Fortran implementation of basin-hopping.  This implementation has many
    different variations of the procedure described above, including more
    advanced step taking algorithms and alternate acceptance criterion.

    For stochastic global optimization there is no way to determine if the true
    global minimum has actually been found. Instead, as a consistency check,
    the algorithm can be run from a number of different random starting points
    to ensure the lowest minimum found in each example has converged to the
    global minimum.  For this reason ``basinhopping`` will by default simply
    run for the number of iterations ``niter`` and return the lowest minimum
    found.  It is left to the user to ensure that this is in fact the global
    minimum.

    Choosing ``stepsize``:  This is a crucial parameter in ``basinhopping`` and
    depends on the problem being solved.  Ideally it should be comparable to
    the typical separation between local minima of the function being
    optimized.  ``basinhopping`` will, by default, adjust ``stepsize`` to find
    an optimal value, but this may take many iterations.  You will get quicker
    results if you set a sensible value for ``stepsize``.

    Choosing ``T``: The parameter ``T`` is the temperature used in the
    metropolis criterion.  Basinhopping steps are accepted with probability
    ``1`` if ``func(xnew) < func(xold)``, or otherwise with probability::

        exp( -(func(xnew) - func(xold)) / T )

    So, for best results, ``T`` should be comparable to the typical
    difference in function values between local minima.

    References
    ----------
    .. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University Press,
        Cambridge, UK.
    .. [2] Wales, D J, and Doye J P K, Global Optimization by Basin-Hopping and
        the Lowest Energy Structures of Lennard-Jones Clusters Containing up to
        110 Atoms.  Journal of Physical Chemistry A, 1997, 101, 5111.
    .. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to the
        multiple-minima problem in protein folding, Proc. Natl. Acad. Sci. USA,
        1987, 84, 6611.
    .. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters,
        crystals, and biomolecules, Science, 1999, 285, 1368.

    Examples
    --------
    The following example is a one-dimensional minimization problem,  with many
    local minima superimposed on a parabola.

    >>> func = lambda x: cos(14.5 * x - 0.3) + (x + 0.2) * x
    >>> x0=[1.]

    Basinhopping, internally, uses a local minimization algorithm.  We will use
    the parameter ``minimizer_kwargs`` to tell basinhopping which algorithm to
    use and how to set up that minimizer.  This parameter will be passed to
    ``scipy.optimize.minimize()``.

    >>> minimizer_kwargs = {"method": "BFGS"}
    >>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = %.4f, f(x0) = %.4f" % (ret.x, ret.fun))
    global minimum: x = -0.1951, f(x0) = -1.0009

    Next consider a two-dimensional minimization problem. Also, this time we
    will use gradient information to significantly speed up the search.

    >>> def func2d(x):
    ...     f = cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] +
    ...                                                         0.2) * x[0]
    ...     df = np.zeros(2)
    ...     df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
    ...     df[1] = 2. * x[1] + 0.2
    ...     return f, df

    We'll also use a different local minimization algorithm.  Also we must tell
    the minimizer that our function returns both energy and gradient (jacobian)

    >>> minimizer_kwargs = {"method":"L-BFGS-B", "jac":True}
    >>> x0 = [1.0, 1.0]
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = [%.4f, %.4f], f(x0) = %.4f" % (ret.x[0],
    ...                                                           ret.x[1],
    ...                                                           ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x0) = -1.0109


    Here is an example using a custom step taking routine.  Imagine you want
    the first coordinate to take larger steps then the rest of the coordinates.
    This can be implemented like so:

    >>> class MyTakeStep(object):
    ...    def __init__(self, stepsize=0.5):
    ...        self.stepsize = stepsize
    ...    def __call__(self, x):
    ...        s = self.stepsize
    ...        x[0] += np.random.uniform(-2.*s, 2.*s)
    ...        x[1:] += np.random.uniform(-s, s, x[1:].shape)
    ...        return x

    Since ``MyTakeStep.stepsize`` exists basinhopping will adjust the magnitude
    of ``stepsize`` to optimize the search.  We'll use the same 2-D function as
    before

    >>> mytakestep = MyTakeStep()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200, take_step=mytakestep)
    >>> print("global minimum: x = [%.4f, %.4f], f(x0) = %.4f" % (ret.x[0],
    ...                                                           ret.x[1],
    ...                                                           ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x0) = -1.0109


    Now let's do an example using a custom callback function which prints the
    value of every minimum found

    >>> def print_fun(x, f, accepted):
    ...         print("at minima %.4f accepted %d" % (f, int(accepted)))

    We'll run it for only 10 basinhopping steps this time.

    >>> np.random.seed(1)
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, callback=print_fun)
    at minima 0.4159 accepted 1
    at minima -0.9073 accepted 1
    at minima -0.1021 accepted 1
    at minima -0.1021 accepted 1
    at minima 0.9102 accepted 1
    at minima 0.9102 accepted 1
    at minima 2.2945 accepted 0
    at minima -0.1021 accepted 1
    at minima -1.0109 accepted 1
    at minima -1.0109 accepted 1


    The minima at -1.0109 is actually the global minimum, found already on the
    8th iteration.

    Now let's implement bounds on the problem using a custom ``accept_test``:

    >>> class MyBounds(object):
    ...     def __init__(self, xmax=[1.1,1.1], xmin=[-1.1,-1.1] ):
    ...         self.xmax = np.array(xmax)
    ...         self.xmin = np.array(xmin)
    ...     def __call__(self, **kwargs):
    ...         x = kwargs["x_new"]
    ...         tmax = bool(np.all(x <= self.xmax))
    ...         tmin = bool(np.all(x >= self.xmin))
    ...         return tmax and tmin

    >>> mybounds = MyBounds()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, accept_test=mybounds)

    """
    x0 = np.array(x0)

    # set up minimizer
    if minimizer_kwargs is None:
        minimizer_kwargs = dict()
    wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func,
                                         **minimizer_kwargs)

    # set up step taking algorithm
    if take_step is not None:
        # use the callable() builtin: isinstance(x, collections.Callable)
        # raises AttributeError on Python >= 3.10 (alias removed)
        if not callable(take_step):
            raise TypeError("take_step must be callable")
        # if take_step.stepsize exists then use AdaptiveStepsize to control
        # take_step.stepsize
        if hasattr(take_step, "stepsize"):
            take_step_wrapped = AdaptiveStepsize(take_step, interval=interval,
                                                 verbose=disp)
        else:
            take_step_wrapped = take_step
    else:
        # use default
        displace = RandomDisplacement(stepsize=stepsize)
        take_step_wrapped = AdaptiveStepsize(displace, interval=interval,
                                             verbose=disp)

    # set up accept tests
    if accept_test is not None:
        if not callable(accept_test):
            raise TypeError("accept_test must be callable")
        accept_tests = [accept_test]
    else:
        accept_tests = []
    # use default: the Metropolis criterion is always applied last
    metropolis = Metropolis(T)
    accept_tests.append(metropolis)

    if niter_success is None:
        niter_success = niter + 2

    bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped,
                            accept_tests, disp=disp)

    # start main iteration loop
    count = 0
    i = -1  # so res.nit is well-defined even when niter == 0
    message = ["requested number of basinhopping iterations completed"
               " successfully"]
    for i in range(niter):
        new_global_min = bh.one_cycle()

        if callable(callback):
            # should we pass a copy of x?
            val = callback(bh.xtrial, bh.energy_trial, bh.storage.get_lowest()[1], bh.accept)
            if val is not None:
                if val:
                    message = ["callback function requested stop early by"
                               " returning True"]
                    break

        count += 1
        if new_global_min:
            count = 0
        elif count > niter_success:
            message = ["success condition satisfied"]
            break

    # prepare return object
    lowest = bh.storage.get_lowest()
    res = bh.res
    res.x = np.copy(lowest[0])
    res.fun = lowest[1]
    res.message = message
    res.nit = i + 1
    return res
637
638
639def _test_func2d_nograd(x):
640    f = (cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
641         + 1.010876184442655)
642    return f
643
644
645def _test_func2d(x):
646    f = (cos(14.5 * x[0] - 0.3) + (x[0] + 0.2) * x[0] + cos(14.5 * x[1] -
647         0.3) + (x[1] + 0.2) * x[1] + x[0] * x[1] + 1.963879482144252)
648    df = np.zeros(2)
649    df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 + x[1]
650    df[1] = -14.5 * sin(14.5 * x[1] - 0.3) + 2. * x[1] + 0.2 + x[0]
651    return f, df
652
653if __name__ == "__main__":
654    print("\n\nminimize a 2d function without gradient")
655    # minimum expected at ~[-0.195, -0.1]
656    kwargs = {"method": "L-BFGS-B"}
657    x0 = np.array([1.0, 1.])
658    scipy.optimize.minimize(_test_func2d_nograd, x0, **kwargs)
659    ret = basinhopping(_test_func2d_nograd, x0, minimizer_kwargs=kwargs,
660                       niter=200, disp=False)
661    print("minimum expected at  func([-0.195, -0.1]) = 0.0")
662    print(ret)
663
664    print("\n\ntry a harder 2d problem")
665    kwargs = {"method": "L-BFGS-B", "jac": True}
666    x0 = np.array([1.0, 1.0])
667    ret = basinhopping(_test_func2d, x0, minimizer_kwargs=kwargs, niter=200,
668                       disp=False)
669    print("minimum expected at ~, func([-0.19415263, -0.19415263]) = 0")
670    print(ret)
671
Note: See TracBrowser for help on using the repository browser.