Coverage for /builds/kinetik161/ase/ase/optimize/sciopt.py: 68.22%

107 statements  


from typing import IO, Optional, Union

import numpy as np
import scipy.optimize as opt

from ase import Atoms
from ase.optimize.optimize import Optimizer


class Converged(Exception):
    pass


class OptimizerConvergenceError(Exception):
    pass


class SciPyOptimizer(Optimizer):
    """General interface for SciPy optimizers

    Only the call to the optimizer is still needed.
    """

    def __init__(
        self,
        atoms: Atoms,
        logfile: Union[IO, str] = '-',
        trajectory: Optional[str] = None,
        callback_always: bool = False,
        alpha: float = 70.0,
        master: Optional[bool] = None,
        force_consistent=Optimizer._deprecated,
    ):
        """Initialize object

        Parameters:

        atoms: Atoms object
            The Atoms object to relax.

        trajectory: str
            File used to store the trajectory of atomic movement.

        logfile: file object or str
            If *logfile* is a string, a file with that name will be opened.
            Use '-' for stdout.

        callback_always: bool
            Whether the callback should be run after each force call
            (also in the line search).

        alpha: float
            Initial guess for the Hessian (curvature of the energy
            surface). The default of 70.0 is conservative; a lower value
            may converge in fewer steps, but also increases the risk of
            instability.

        master: bool
            Defaults to None, which causes only rank 0 to save files.
            If set to true, this rank will save files.
        """
        restart = None
        Optimizer.__init__(self, atoms, restart, logfile, trajectory,
                           master, force_consistent=force_consistent)
        self.force_calls = 0
        self.callback_always = callback_always
        self.H0 = alpha
        self.max_steps = 0


    def x0(self):
        """Return x0 in a way SciPy can use.

        This method is mostly useful for subclasses wanting to redefine
        the parameters (and the objective function)."""
        return self.optimizable.get_positions().reshape(-1)

    def f(self, x):
        """Objective function for use by the optimizers."""
        self.optimizable.set_positions(x.reshape(-1, 3))
        # Scale the problem: SciPy uses the identity as the initial
        # Hessian, so dividing by H0 makes that guess correspond to
        # H0 * I in the unscaled coordinates.
        return self.optimizable.get_potential_energy() / self.H0

    def fprime(self, x):
        """Gradient of the objective function for use by the optimizers."""
        self.optimizable.set_positions(x.reshape(-1, 3))
        self.force_calls += 1

        if self.callback_always:
            self.callback(x)

        # Remember that forces are minus the gradient!
        # Scale the problem as SciPy uses I as the initial Hessian.
        return -self.optimizable.get_forces().reshape(-1) / self.H0


    def callback(self, x):
        """Callback function to be run after each iteration by SciPy.

        This should also be called once before optimization starts, as
        SciPy optimizers only call it after each iteration, while ASE
        optimizers call something similar before as well.

        :meth:`callback`() can raise a :exc:`Converged` exception to
        signal that the optimization is complete. This will be silently
        ignored by :meth:`run`().
        """
        if self.nsteps < self.max_steps:
            self.nsteps += 1
            f = self.optimizable.get_forces()
            self.log(f)
            self.call_observers()
            if self.converged(f):
                raise Converged


    def run(self, fmax=0.05, steps=100000000):
        self.fmax = fmax

        try:
            # As SciPy does not log the zeroth iteration, we do that manually
            if self.nsteps == 0:
                self.log()
                self.call_observers()

            self.max_steps = steps + self.nsteps

            # Scale the problem as SciPy uses I as the initial Hessian.
            self.call_fmin(fmax / self.H0, steps)
        except Converged:
            pass
        return self.converged()

    def dump(self, data):
        pass

    def load(self):
        pass

    def call_fmin(self, fmax, steps):
        raise NotImplementedError
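

# --- Illustrative sketch, not part of the original module ---
# The base class above needs only call_fmin() from a subclass, as the two
# classes below demonstrate. As a hedged example, a subclass could just as
# well delegate to the generic scipy.optimize.minimize interface; the class
# name here is hypothetical.
class SciPyMinimizeLBFGSB(SciPyOptimizer):
    """Hypothetical example subclass driving scipy.optimize.minimize."""

    def call_fmin(self, fmax, steps):
        # A Converged exception raised inside self.callback propagates
        # out of minimize() and is caught by run().
        opt.minimize(self.f,
                     self.x0(),
                     jac=self.fprime,
                     method='L-BFGS-B',
                     callback=self.callback,
                     options={'maxiter': steps, 'gtol': fmax * 0.1})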



class SciPyFminCG(SciPyOptimizer):
    """Non-linear (Polak-Ribiere) conjugate gradient algorithm"""

    def call_fmin(self, fmax, steps):
        output = opt.fmin_cg(self.f,
                             self.x0(),
                             fprime=self.fprime,
                             # args=(),
                             gtol=fmax * 0.1,  # Should never be reached
                             norm=np.inf,
                             # epsilon=
                             maxiter=steps,
                             full_output=1,
                             disp=0,
                             # retall=0,
                             callback=self.callback)
        warnflag = output[-1]
        if warnflag == 2:
            raise OptimizerConvergenceError(
                'Warning: Desired error not necessarily achieved '
                'due to precision loss')



class SciPyFminBFGS(SciPyOptimizer):
    """Quasi-Newton method (Broyden-Fletcher-Goldfarb-Shanno)"""

    def call_fmin(self, fmax, steps):
        output = opt.fmin_bfgs(self.f,
                               self.x0(),
                               fprime=self.fprime,
                               # args=(),
                               gtol=fmax * 0.1,  # Should never be reached
                               norm=np.inf,
                               # epsilon=1.4901161193847656e-08,
                               maxiter=steps,
                               full_output=1,
                               disp=0,
                               # retall=0,
                               callback=self.callback)
        warnflag = output[-1]
        if warnflag == 2:
            raise OptimizerConvergenceError(
                'Warning: Desired error not necessarily achieved '
                'due to precision loss')
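

# --- Illustrative usage sketch, not part of the original module ---
# A hedged example of driving the gradient-based classes above; the helper
# name is made up, and it assumes ase.build and the EMT calculator are
# available. SciPyFminCG is a drop-in replacement for SciPyFminBFGS here.
def _example_relax_copper():
    from ase.build import bulk
    from ase.calculators.emt import EMT

    atoms = bulk('Cu', cubic=True).repeat((2, 2, 2))
    atoms.rattle(stdev=0.1, seed=42)
    atoms.calc = EMT()
    relax = SciPyFminBFGS(atoms, trajectory='cu_relax.traj')
    relax.run(fmax=0.05, steps=100)
    return atoms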



class SciPyGradientlessOptimizer(Optimizer):
    """General interface for gradient-free SciPy optimizers

    Only the call to the optimizer is still needed.

    Note: If you redefine x0() and f(), you don't even need an atoms object.
    Redefining these also allows you to specify an arbitrary objective
    function.

    XXX: This is still a work in progress
    """

    def __init__(self, atoms, logfile='-', trajectory=None,
                 callback_always=False, master=None,
                 force_consistent=Optimizer._deprecated):
        """Initialize object

        Parameters:

        atoms: Atoms object
            The Atoms object to relax.

        trajectory: str
            File used to store the trajectory of atomic movement.

        logfile: file object or str
            If *logfile* is a string, a file with that name will be opened.
            Use '-' for stdout.

        callback_always: bool
            Whether the callback should be run after each function call.

        master: bool
            Defaults to None, which causes only rank 0 to save files.
            If set to true, this rank will save files.
        """
        restart = None
        Optimizer.__init__(self, atoms, restart, logfile, trajectory,
                           master, force_consistent=force_consistent)
        self.function_calls = 0
        self.callback_always = callback_always


    def x0(self):
        """Return x0 in a way SciPy can use.

        This method is mostly useful for subclasses wanting to redefine
        the parameters (and the objective function)."""
        return self.optimizable.get_positions().reshape(-1)

    def f(self, x):
        """Objective function for use by the optimizers."""
        self.optimizable.set_positions(x.reshape(-1, 3))
        self.function_calls += 1
        return self.optimizable.get_potential_energy()

    def callback(self, x):
        """Callback function to be run after each iteration by SciPy.

        This should also be called once before optimization starts, as
        SciPy optimizers only call it after each iteration, while ASE
        optimizers call something similar before as well.
        """
        # We can't assume that forces are available!
        # f = self.optimizable.get_forces()
        # self.log(f)
        self.call_observers()
        # if self.converged(f):
        #     raise Converged
        self.nsteps += 1


    def run(self, ftol=0.01, xtol=0.01, steps=100000000):
        self.xtol = xtol
        self.ftol = ftol
        # As SciPy does not log the zeroth iteration, we do that manually
        self.callback(None)
        try:
            self.call_fmin(xtol, ftol, steps)
        except Converged:
            pass
        return self.converged()

    def dump(self, data):
        pass

    def load(self):
        pass

    def call_fmin(self, xtol, ftol, steps):
        raise NotImplementedError



class SciPyFmin(SciPyGradientlessOptimizer):
    """Nelder-Mead Simplex algorithm

    Uses only function calls.

    XXX: This is still a work in progress
    """

    def call_fmin(self, xtol, ftol, steps):
        opt.fmin(self.f,
                 self.x0(),
                 # args=(),
                 xtol=xtol,
                 ftol=ftol,
                 maxiter=steps,
                 # maxfun=None,
                 # full_output=1,
                 disp=0,
                 # retall=0,
                 callback=self.callback)



class SciPyFminPowell(SciPyGradientlessOptimizer):
    """Powell's (modified) conjugate direction method

    Uses only function calls.

    XXX: This is still a work in progress
    """

    def __init__(self, *args, **kwargs):
        """Parameters:

        direc: float
            Initial displacement applied along each search direction.
            Defaults to 0.04.
        """
        direc = kwargs.pop('direc', None)
        SciPyGradientlessOptimizer.__init__(self, *args, **kwargs)

        if direc is None:
            self.direc = np.eye(len(self.x0()), dtype=float) * 0.04
        else:
            self.direc = np.eye(len(self.x0()), dtype=float) * direc

    def call_fmin(self, xtol, ftol, steps):
        opt.fmin_powell(self.f,
                        self.x0(),
                        # args=(),
                        xtol=xtol,
                        ftol=ftol,
                        maxiter=steps,
                        # maxfun=None,
                        # full_output=1,
                        disp=0,
                        # retall=0,
                        callback=self.callback,
                        direc=self.direc)
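

# --- Illustrative usage sketch, not part of the original module ---
# A hedged example of the gradient-free path, which the docstrings above
# flag as work in progress; the helper name is made up, and it assumes the
# EMT calculator is available. SciPyFminPowell only asks the calculator for
# energies during the minimization, but note that run() still ends with a
# self.converged() check, which consults forces in ASE's base Optimizer.
def _example_powell_relax():
    from ase.build import bulk
    from ase.calculators.emt import EMT

    atoms = bulk('Cu', cubic=True)
    atoms.rattle(stdev=0.05, seed=1)
    atoms.calc = EMT()
    # direc sets the initial displacement along each search direction.
    relax = SciPyFminPowell(atoms, direc=0.01)
    relax.run(ftol=1e-3, xtol=1e-3, steps=500)
    return atoms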