""" 

basinhopping: The basinhopping global optimization algorithm 

""" 

from __future__ import division, print_function, absolute_import 

 

import numpy as np 

import math 

from numpy import cos, sin 

import scipy.optimize 

from scipy._lib._util import check_random_state 

 

__all__ = ['basinhopping'] 

 

 

class Storage(object): 

""" 

Class used to store the lowest energy structure 
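
Example (an illustrative sketch; in normal use ``Storage`` receives the
``OptimizeResult`` returned by the local minimizer):

>>> import numpy as np
>>> from scipy.optimize import OptimizeResult
>>> res1 = OptimizeResult(x=np.array([1.0]), fun=0.5)
>>> res2 = OptimizeResult(x=np.array([0.9]), fun=0.3)
>>> store = Storage(res1)
>>> store.update(res2)  # lower energy, so it replaces the stored result
True
>>> store.get_lowest().fun
0.3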

""" 

def __init__(self, minres): 

self._add(minres) 

 

def _add(self, minres): 

self.minres = minres 

self.minres.x = np.copy(minres.x) 

 

def update(self, minres): 

if minres.fun < self.minres.fun: 

self._add(minres) 

return True 

else: 

return False 

 

def get_lowest(self): 

return self.minres 

 

 

class BasinHoppingRunner(object): 

"""This class implements the core of the basinhopping algorithm. 

 

Parameters
----------
x0 : ndarray

The starting coordinates. 

minimizer : callable 

The local minimizer, with signature ``result = minimizer(x)``. 

The return value is an `optimize.OptimizeResult` object. 

step_taking : callable 

This function displaces the coordinates randomly. Signature should 

be ``x_new = step_taking(x)``. Note that `x` may be modified in-place. 

accept_tests : list of callables 

Each test is passed the kwargs `f_new`, `x_new`, `f_old` and 

`x_old`. These tests will be used to judge whether or not to accept 

the step. The acceptable return values are True, False, or ``"force 

accept"``. If any of the tests return False then the step is rejected. 

If ``"force accept"`` is returned, it overrides any other test and the
step is accepted. This can be used, for example, to forcefully escape
from a local minimum in which ``basinhopping`` is trapped.

disp : bool, optional 

Display status messages. 
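
Illustrative construction (internal API; a minimal sketch using the other
helpers defined in this module):

>>> import numpy as np
>>> import scipy.optimize
>>> minimizer = MinimizerWrapper(scipy.optimize.minimize,
...                              func=lambda x: np.sum(x ** 2),
...                              method="L-BFGS-B")
>>> runner = BasinHoppingRunner(np.array([1.0]), minimizer,
...                             RandomDisplacement(stepsize=0.5),
...                             [Metropolis(T=1.0)])
>>> _ = runner.one_cycle()  # one displace/minimize/accept cycle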

 

""" 

def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False): 

self.x = np.copy(x0) 

self.minimizer = minimizer 

self.step_taking = step_taking 

self.accept_tests = accept_tests 

self.disp = disp 

 

self.nstep = 0 

 

# initialize return object 

self.res = scipy.optimize.OptimizeResult() 

self.res.minimization_failures = 0 

 

# do initial minimization 

minres = minimizer(self.x) 

if not minres.success: 

self.res.minimization_failures += 1 

if self.disp: 

print("warning: basinhopping: local minimization failure") 

self.x = np.copy(minres.x) 

self.energy = minres.fun 

if self.disp: 

print("basinhopping step %d: f %g" % (self.nstep, self.energy)) 

 

# initialize storage class 

self.storage = Storage(minres) 

 

if hasattr(minres, "nfev"): 

self.res.nfev = minres.nfev 

if hasattr(minres, "njev"): 

self.res.njev = minres.njev 

if hasattr(minres, "nhev"): 

self.res.nhev = minres.nhev 

 

def _monte_carlo_step(self): 

"""Do one Monte Carlo iteration 

 

Randomly displace the coordinates, minimize, and decide whether 

or not to accept the new coordinates. 

""" 

# Take a random step. Make a copy of x because the step_taking 

# algorithm might change x in place 

x_after_step = np.copy(self.x) 

x_after_step = self.step_taking(x_after_step) 

 

# do a local minimization 

minres = self.minimizer(x_after_step) 

x_after_quench = minres.x 

energy_after_quench = minres.fun 

if not minres.success: 

self.res.minimization_failures += 1 

if self.disp: 

print("warning: basinhopping: local minimization failure") 

 

if hasattr(minres, "nfev"): 

self.res.nfev += minres.nfev 

if hasattr(minres, "njev"): 

self.res.njev += minres.njev 

if hasattr(minres, "nhev"): 

self.res.nhev += minres.nhev 

 

# accept the move based on self.accept_tests. If any test is False, 

# then reject the step. If any test returns the special string 

# 'force accept', then accept the step regardless. This can be used 

# to forcefully escape from a local minimum if normal basin hopping 

# steps are not sufficient. 

accept = True 

for test in self.accept_tests: 

testres = test(f_new=energy_after_quench, x_new=x_after_quench, 

f_old=self.energy, x_old=self.x) 

if testres == 'force accept': 

accept = True 

break 

elif testres is None: 

raise ValueError("accept_tests must return True, False, or " 

"'force accept'") 

elif not testres: 

accept = False 

 

# Report the result of the acceptance test to the take step class. 

# This is for adaptive step taking 

if hasattr(self.step_taking, "report"): 

self.step_taking.report(accept, f_new=energy_after_quench, 

x_new=x_after_quench, f_old=self.energy, 

x_old=self.x) 

 

return accept, minres 

 

def one_cycle(self): 

"""Do one cycle of the basinhopping algorithm 

""" 

self.nstep += 1 

new_global_min = False 

 

accept, minres = self._monte_carlo_step() 

 

if accept: 

self.energy = minres.fun 

self.x = np.copy(minres.x) 

new_global_min = self.storage.update(minres) 

 

# print some information 

if self.disp: 

self.print_report(minres.fun, accept) 

if new_global_min: 

print("found new global minimum on step %d with function" 

" value %g" % (self.nstep, self.energy)) 

 

# save some variables as BasinHoppingRunner attributes 

self.xtrial = minres.x 

self.energy_trial = minres.fun 

self.accept = accept 

 

return new_global_min 

 

def print_report(self, energy_trial, accept): 

"""print a status update""" 

minres = self.storage.get_lowest() 

print("basinhopping step %d: f %g trial_f %g accepted %d " 

" lowest_f %g" % (self.nstep, self.energy, energy_trial, 

accept, minres.fun)) 

 

 

class AdaptiveStepsize(object): 

""" 

Class to implement adaptive stepsize. 

 

This class wraps the step taking class and modifies the stepsize to 

ensure the true acceptance rate is as close as possible to the target. 

 

Parameters 

---------- 

takestep : callable 

The step taking routine. Must contain modifiable attribute 

takestep.stepsize 

accept_rate : float, optional 

The target step acceptance rate 

interval : int, optional 

Interval for how often to update the stepsize 

factor : float, optional 

The step size is multiplied or divided by this factor upon each 

update. 

verbose : bool, optional 

Print information about each update 
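
Illustrative usage (a sketch; ``basinhopping`` normally constructs this
wrapper internally):

>>> import numpy as np
>>> displace = RandomDisplacement(stepsize=0.5)
>>> adaptive = AdaptiveStepsize(displace, accept_rate=0.5, interval=10,
...                             verbose=False)
>>> x_new = adaptive(np.zeros(2))  # delegates to displace, counts the step
>>> adaptive.report(True)          # acceptance feedback tunes the stepsize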

 

""" 

def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9, 

verbose=True): 

self.takestep = takestep 

self.target_accept_rate = accept_rate 

self.interval = interval 

self.factor = factor 

self.verbose = verbose 

 

self.nstep = 0 

self.nstep_tot = 0 

self.naccept = 0 

 

def __call__(self, x): 

return self.take_step(x) 

 

def _adjust_step_size(self): 

old_stepsize = self.takestep.stepsize 

accept_rate = float(self.naccept) / self.nstep 

if accept_rate > self.target_accept_rate: 

# We're accepting too many steps. This generally means we're 

# trapped in a basin. Take bigger steps 

self.takestep.stepsize /= self.factor 

else: 

# We're not accepting enough steps. Take smaller steps 

self.takestep.stepsize *= self.factor 

if self.verbose: 

print("adaptive stepsize: acceptance rate %f target %f new " 

"stepsize %g old stepsize %g" % (accept_rate, 

self.target_accept_rate, self.takestep.stepsize, 

old_stepsize)) 

 

def take_step(self, x): 

self.nstep += 1 

self.nstep_tot += 1 

if self.nstep % self.interval == 0: 

self._adjust_step_size() 

return self.takestep(x) 

 

def report(self, accept, **kwargs): 

"called by basinhopping to report the result of the step" 

if accept: 

self.naccept += 1 

 

 

class RandomDisplacement(object): 

""" 

Add a random displacement of maximum size `stepsize` to each coordinate 

 

Calling this updates `x` in-place. 

 

Parameters 

---------- 

stepsize : float, optional 

Maximum stepsize in any dimension 

random_state : None or `np.random.RandomState` instance, optional 

The random number generator that generates the displacements 
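
A quick check of the behavior (a sketch, assuming a seeded ``RandomState``):

>>> import numpy as np
>>> displace = RandomDisplacement(stepsize=0.5,
...                               random_state=np.random.RandomState(0))
>>> x = np.zeros(3)
>>> x_new = displace(x)
>>> bool(np.all(np.abs(x_new) <= 0.5))
True
>>> x_new is x  # the displacement is applied in place
True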

""" 

def __init__(self, stepsize=0.5, random_state=None): 

self.stepsize = stepsize 

self.random_state = check_random_state(random_state) 

 

def __call__(self, x): 

x += self.random_state.uniform(-self.stepsize, self.stepsize, 

np.shape(x)) 

return x 

 

 

class MinimizerWrapper(object): 

""" 

wrap a minimizer function as a minimizer class 
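
Illustrative sketch, wrapping ``scipy.optimize.minimize``:

>>> import numpy as np
>>> import scipy.optimize
>>> wrapped = MinimizerWrapper(scipy.optimize.minimize,
...                            func=lambda x: np.sum((x - 2.0) ** 2),
...                            method="L-BFGS-B")
>>> res = wrapped(np.array([0.0]))  # calls minimize(func, x0, **kwargs)
>>> bool(abs(res.x[0] - 2.0) < 1e-4)
True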

""" 

def __init__(self, minimizer, func=None, **kwargs): 

self.minimizer = minimizer 

self.func = func 

self.kwargs = kwargs 

 

def __call__(self, x0): 

if self.func is None: 

return self.minimizer(x0, **self.kwargs) 

else: 

return self.minimizer(self.func, x0, **self.kwargs) 

 

 

class Metropolis(object): 

""" 

Metropolis acceptance criterion 

 

Parameters 

---------- 

T : float 

The "temperature" parameter for the accept or reject criterion. 

random_state : None or `np.random.RandomState` object 

Random number generator used for acceptance test 
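
For example, a downhill move is always accepted, since the acceptance
weight ``exp(min(0, -(f_new - f_old) / T))`` is then equal to 1:

>>> met = Metropolis(T=1.0)
>>> met(f_new=0.0, x_new=None, f_old=1.0, x_old=None)
True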

""" 

def __init__(self, T, random_state=None): 

# Avoid ZeroDivisionError since "MBH can be regarded as a special case 

# of the BH framework with the Metropolis criterion, where temperature 

# T = 0." (Reject all steps that increase energy.) 

self.beta = 1.0 / T if T != 0 else float('inf') 

self.random_state = check_random_state(random_state) 

 

def accept_reject(self, energy_new, energy_old): 

""" 

If new energy is lower than old, it will always be accepted. 

If new is higher than old, there is a chance it will be accepted, 

less likely for larger differences. 

""" 

w = math.exp(min(0, -float(energy_new - energy_old) * self.beta)) 

rand = self.random_state.rand() 

return w >= rand 

 

def __call__(self, **kwargs): 

""" 

f_new and f_old are mandatory in kwargs 

""" 

return bool(self.accept_reject(kwargs["f_new"], 

kwargs["f_old"])) 

 

 

def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5, 

minimizer_kwargs=None, take_step=None, accept_test=None, 

callback=None, interval=50, disp=False, niter_success=None, 

seed=None): 

""" 

Find the global minimum of a function using the basin-hopping algorithm 

 

Basin-hopping is a two-phase method that combines a global stepping 

algorithm with local minimization at each step. Designed to mimic 

the natural process of energy minimization of clusters of atoms, it works 

well for similar problems with "funnel-like, but rugged" energy landscapes 

[5]_. 

 

As the step-taking, step acceptance, and minimization methods are all 

customizable, this function can also be used to implement other two-phase 

methods. 

 

Parameters 

---------- 

func : callable ``f(x, *args)`` 

Function to be optimized. ``args`` can be passed as an optional item 

in the dict ``minimizer_kwargs`` 

x0 : array_like 

Initial guess. 

niter : integer, optional 

The number of basin-hopping iterations 

T : float, optional 

The "temperature" parameter for the accept or reject criterion. Higher 

"temperatures" mean that larger jumps in function value will be 

accepted. For best results ``T`` should be comparable to the 

separation (in function value) between local minima. 

stepsize : float, optional 

Maximum step size for use in the random displacement. 

minimizer_kwargs : dict, optional 

Extra keyword arguments to be passed to the local minimizer 

``scipy.optimize.minimize()``. Some important options could be:

 

method : str 

The minimization method (e.g. ``"L-BFGS-B"``) 

args : tuple 

Extra arguments passed to the objective function (``func``) and 

its derivatives (Jacobian, Hessian). 

 

take_step : callable ``take_step(x)``, optional 

Replace the default step-taking routine with this routine. The default 

step-taking routine is a random displacement of the coordinates, but 

other step-taking algorithms may be better for some systems. 

``take_step`` can optionally have the attribute ``take_step.stepsize``. 

If this attribute exists, then ``basinhopping`` will adjust 

``take_step.stepsize`` in order to try to optimize the global minimum 

search. 

accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=fold, x_old=x_old)``, optional 

Define a test which will be used to judge whether or not to accept the 

step. This will be used in addition to the Metropolis test based on 

"temperature" ``T``. The acceptable return values are True, 

False, or ``"force accept"``. If any of the tests return False 

then the step is rejected. If ``"force accept"`` is returned, it
overrides any other test and the step is accepted. This can be used,
for example, to forcefully escape from a local minimum in which
``basinhopping`` is trapped.

callback : callable, ``callback(x, f, accept)``, optional 

A callback function which will be called for all minima found. ``x`` 

and ``f`` are the coordinates and function value of the trial minimum, 

and ``accept`` is whether or not that minimum was accepted. This can 

be used, for example, to save the lowest N minima found. Also, 

``callback`` can be used to specify a user defined stop criterion by 

optionally returning True to stop the ``basinhopping`` routine. 

interval : integer, optional 

interval for how often to update the ``stepsize`` 

disp : bool, optional 

Set to True to print status messages 

niter_success : integer, optional 

Stop the run if the global minimum candidate remains the same for this 

number of iterations. 

seed : int or `np.random.RandomState`, optional 

If `seed` is not specified, the `np.random.RandomState` singleton is
used. If `seed` is an int, a new `np.random.RandomState` instance is
used, seeded with `seed`. If `seed` is already a
`np.random.RandomState` instance, then that instance is used.

Specify `seed` for repeatable minimizations. The random numbers 

generated with this seed only affect the default Metropolis 

`accept_test` and the default `take_step`. If you supply your own 

`take_step` and `accept_test`, and these functions use random 

number generation, then those functions are responsible for the state 

of their random number generator. 

 

Returns 

------- 

res : OptimizeResult 

The optimization result represented as an ``OptimizeResult`` object.

Important attributes are: ``x`` the solution array, ``fun`` the value 

of the function at the solution, and ``message`` which describes the 

cause of the termination. The ``OptimizeResult`` object returned by the 

selected minimizer at the lowest minimum is also contained within this 

object and can be accessed through the ``lowest_optimization_result`` 

attribute. See `OptimizeResult` for a description of other attributes. 

 

See Also 

-------- 

minimize : 

The local minimization function called once for each basinhopping step. 

``minimizer_kwargs`` is passed to this routine. 

 

Notes 

----- 

Basin-hopping is a stochastic algorithm which attempts to find the global 

minimum of a smooth scalar function of one or more variables [1]_ [2]_ [3]_ 

[4]_. The algorithm in its current form was described by David Wales and 

Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/. 

 

The algorithm is iterative with each cycle composed of the following 

features 

 

1) random perturbation of the coordinates 

 

2) local minimization 

 

3) accept or reject the new coordinates based on the minimized function 

value 

 

The acceptance test used here is the Metropolis criterion of standard Monte 

Carlo algorithms, although there are many other possibilities [3]_. 
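
In outline, each iteration does the following (schematic pseudocode, not
the exact implementation)::

    x_trial = take_step(x)               # 1) random perturbation
    minres = minimize(func, x_trial)     # 2) local minimization
    if accept(minres.fun, f_current):    # 3) accept/reject (Metropolis)
        x, f_current = minres.x, minres.fun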

 

This global minimization method has been shown to be extremely efficient 

for a wide variety of problems in physics and chemistry. It is 

particularly useful when the function has many minima separated by large 

barriers. See the Cambridge Cluster Database 

http://www-wales.ch.cam.ac.uk/CCD.html for databases of molecular systems 

that have been optimized primarily using basin-hopping. This database 

includes minimization problems exceeding 300 degrees of freedom. 

 

See the free software program GMIN (http://www-wales.ch.cam.ac.uk/GMIN) for 

a Fortran implementation of basin-hopping. This implementation has many 

different variations of the procedure described above, including more 

advanced step-taking algorithms and alternate acceptance criteria.

 

For stochastic global optimization there is no way to determine if the true 

global minimum has actually been found. Instead, as a consistency check, 

the algorithm can be run from a number of different random starting points 

to ensure the lowest minimum found in each example has converged to the 

global minimum. For this reason ``basinhopping`` will by default simply 

run for the number of iterations ``niter`` and return the lowest minimum 

found. It is left to the user to ensure that this is in fact the global 

minimum. 

 

Choosing ``stepsize``: This is a crucial parameter in ``basinhopping`` and 

depends on the problem being solved. The step is chosen uniformly in the 

region from x0-stepsize to x0+stepsize, in each dimension. Ideally it 

should be comparable to the typical separation (in argument values) between 

local minima of the function being optimized. ``basinhopping`` will, by 

default, adjust ``stepsize`` to find an optimal value, but this may take 

many iterations. You will get quicker results if you set a sensible 

initial value for ``stepsize``. 

 

Choosing ``T``: The parameter ``T`` is the "temperature" used in the 

Metropolis criterion. Basinhopping steps are always accepted if 

``func(xnew) < func(xold)``. Otherwise, they are accepted with 

probability:: 

 

exp( -(func(xnew) - func(xold)) / T ) 

 

So, for best results, ``T`` should be comparable to the typical

difference (in function values) between local minima. (The height of 

"walls" between local minima is irrelevant.) 

 

If ``T`` is 0, the algorithm becomes Monotonic Basin-Hopping, in which all 

steps that increase energy are rejected. 

 

.. versionadded:: 0.12.0 

 

References 

---------- 

.. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University Press, 

Cambridge, UK. 

.. [2] Wales, D J, and Doye J P K, Global Optimization by Basin-Hopping and 

the Lowest Energy Structures of Lennard-Jones Clusters Containing up to 

110 Atoms. Journal of Physical Chemistry A, 1997, 101, 5111. 

.. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to the 

multiple-minima problem in protein folding, Proc. Natl. Acad. Sci. USA, 

1987, 84, 6611. 

.. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters, 

crystals, and biomolecules, Science, 1999, 285, 1368. 

.. [5] Olson, B., Hashmi, I., Molloy, K., and Shehu, A., Basin Hopping as

a General and Versatile Optimization Framework for the Characterization 

of Biological Macromolecules, Advances in Artificial Intelligence, 

Volume 2012 (2012), Article ID 674832, :doi:`10.1155/2012/674832` 

 

Examples 

-------- 

The following example is a one-dimensional minimization problem, with many 

local minima superimposed on a parabola. 

 

>>> import numpy as np
>>> from scipy.optimize import basinhopping
>>> func = lambda x: np.cos(14.5 * x - 0.3) + (x + 0.2) * x
>>> x0 = [1.]

 

Basinhopping, internally, uses a local minimization algorithm. We will use 

the parameter ``minimizer_kwargs`` to tell basinhopping which algorithm to 

use and how to set up that minimizer. This parameter will be passed to 

``scipy.optimize.minimize()``. 

 

>>> minimizer_kwargs = {"method": "BFGS"} 

>>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs, 

... niter=200) 

>>> print("global minimum: x = %.4f, f(x) = %.4f" % (ret.x[0], ret.fun))
global minimum: x = -0.1951, f(x) = -1.0009

 

Next consider a two-dimensional minimization problem. Also, this time we 

will use gradient information to significantly speed up the search. 

 

>>> def func2d(x): 

... f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 

... 0.2) * x[0] 

... df = np.zeros(2) 

... df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 

... df[1] = 2. * x[1] + 0.2 

... return f, df 

 

We'll also use a different local minimization algorithm, and we must tell
the minimizer that our function returns both energy and gradient (Jacobian):

 

>>> minimizer_kwargs = {"method":"L-BFGS-B", "jac":True} 

>>> x0 = [1.0, 1.0] 

>>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, 

... niter=200) 

>>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0],
...                                                          ret.x[1],
...                                                          ret.fun))
global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109

 

 

Here is an example using a custom step-taking routine. Imagine you want 

the first coordinate to take larger steps than the rest of the coordinates. 

This can be implemented like so: 

 

>>> class MyTakeStep(object): 

... def __init__(self, stepsize=0.5): 

... self.stepsize = stepsize 

... def __call__(self, x): 

... s = self.stepsize 

... x[0] += np.random.uniform(-2.*s, 2.*s) 

... x[1:] += np.random.uniform(-s, s, x[1:].shape) 

... return x 

 

Since ``MyTakeStep.stepsize`` exists, basinhopping will adjust the magnitude

of ``stepsize`` to optimize the search. We'll use the same 2-D function as 

before 

 

>>> mytakestep = MyTakeStep() 

>>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, 

... niter=200, take_step=mytakestep) 

>>> print("global minimum: x = [%.4f, %.4f], f(x) = %.4f" % (ret.x[0],
...                                                          ret.x[1],
...                                                          ret.fun))
global minimum: x = [-0.1951, -0.1000], f(x) = -1.0109

 

 

Now let's do an example using a custom callback function which prints the 

value of every minimum found 

 

>>> def print_fun(x, f, accepted): 

... print("at minimum %.4f accepted %d" % (f, int(accepted))) 

 

We'll run it for only 10 basinhopping steps this time. 

 

>>> np.random.seed(1) 

>>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, 

... niter=10, callback=print_fun) 

at minimum 0.4159 accepted 1 

at minimum -0.9073 accepted 1 

at minimum -0.1021 accepted 1 

at minimum -0.1021 accepted 1 

at minimum 0.9102 accepted 1 

at minimum 0.9102 accepted 1 

at minimum 2.2945 accepted 0 

at minimum -0.1021 accepted 1 

at minimum -1.0109 accepted 1 

at minimum -1.0109 accepted 1 

 

 

The minimum at -1.0109 is actually the global minimum, found already on the
9th iteration.

 

Now let's implement bounds on the problem using a custom ``accept_test``: 

 

>>> class MyBounds(object): 

... def __init__(self, xmax=[1.1,1.1], xmin=[-1.1,-1.1] ): 

... self.xmax = np.array(xmax) 

... self.xmin = np.array(xmin) 

... def __call__(self, **kwargs): 

... x = kwargs["x_new"] 

... tmax = bool(np.all(x <= self.xmax)) 

... tmin = bool(np.all(x >= self.xmin)) 

... return tmax and tmin 

 

>>> mybounds = MyBounds() 

>>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs, 

... niter=10, accept_test=mybounds) 
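
For reproducible runs, pass an integer ``seed`` (a sketch; the exact
sequence of minima visited depends on the SciPy/NumPy versions):

>>> ret1 = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
...                     niter=10, seed=1234)
>>> ret2 = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
...                     niter=10, seed=1234)
>>> bool(np.allclose(ret1.x, ret2.x))
True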

 

""" 

x0 = np.array(x0) 

 

# set up the np.random.RandomState generator 

rng = check_random_state(seed) 

 

# set up minimizer 

if minimizer_kwargs is None: 

minimizer_kwargs = dict() 

wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func, 

**minimizer_kwargs) 

 

# set up step-taking algorithm 

if take_step is not None: 

if not callable(take_step):

raise TypeError("take_step must be callable") 

# if take_step.stepsize exists then use AdaptiveStepsize to control 

# take_step.stepsize 

if hasattr(take_step, "stepsize"): 

take_step_wrapped = AdaptiveStepsize(take_step, interval=interval, 

verbose=disp) 

else: 

take_step_wrapped = take_step 

else: 

# use default 

displace = RandomDisplacement(stepsize=stepsize, random_state=rng) 

take_step_wrapped = AdaptiveStepsize(displace, interval=interval, 

verbose=disp) 

 

# set up accept tests 

if accept_test is not None: 

if not callable(accept_test):

raise TypeError("accept_test must be callable") 

accept_tests = [accept_test] 

else: 

accept_tests = [] 

# use default 

metropolis = Metropolis(T, random_state=rng) 

accept_tests.append(metropolis) 

 

if niter_success is None: 

niter_success = niter + 2 

 

bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped, 

accept_tests, disp=disp) 

 

# start main iteration loop 

count, i = 0, 0 

message = ["requested number of basinhopping iterations completed" 

" successfully"] 

for i in range(niter): 

new_global_min = bh.one_cycle() 

 

if callable(callback):

# should we pass a copy of x? 

val = callback(bh.xtrial, bh.energy_trial, bh.accept) 

if val is not None: 

if val: 

message = ["callback function requested stop early by "
           "returning True"]

break 

 

count += 1 

if new_global_min: 

count = 0 

elif count > niter_success: 

message = ["success condition satisfied"] 

break 

 

# prepare return object 

res = bh.res 

res.lowest_optimization_result = bh.storage.get_lowest() 

res.x = np.copy(res.lowest_optimization_result.x) 

res.fun = res.lowest_optimization_result.fun 

res.message = message 

res.nit = i + 1 

return res 

 

 

def _test_func2d_nograd(x): 

f = (cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0] 

+ 1.010876184442655) 

return f 

 

 

def _test_func2d(x): 

f = (cos(14.5 * x[0] - 0.3) + (x[0] + 0.2) * x[0] + cos(14.5 * x[1] - 

0.3) + (x[1] + 0.2) * x[1] + x[0] * x[1] + 1.963879482144252) 

df = np.zeros(2) 

df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 + x[1] 

df[1] = -14.5 * sin(14.5 * x[1] - 0.3) + 2. * x[1] + 0.2 + x[0] 

return f, df 

 

 

if __name__ == "__main__": 

print("\n\nminimize a 2d function without gradient") 

# minimum expected at ~[-0.195, -0.1] 

kwargs = {"method": "L-BFGS-B"} 

x0 = np.array([1.0, 1.]) 

scipy.optimize.minimize(_test_func2d_nograd, x0, **kwargs) 

ret = basinhopping(_test_func2d_nograd, x0, minimizer_kwargs=kwargs, 

niter=200, disp=False) 

print("minimum expected at func([-0.195, -0.1]) = 0.0") 

print(ret) 

 

print("\n\ntry a harder 2d problem") 

kwargs = {"method": "L-BFGS-B", "jac": True} 

x0 = np.array([1.0, 1.0]) 

ret = basinhopping(_test_func2d, x0, minimizer_kwargs=kwargs, niter=200, 

disp=False) 

print("minimum expected at func([-0.19415263, -0.19415263]) = 0")

print(ret)