Coverage for /builds/kinetik161/ase/ase/io/aims.py: 93.21%
810 statements
1"""Defines class/functions to write input and parse output for FHI-aims."""
2import os
3import re
4import time
5import warnings
6from pathlib import Path
7from typing import Any, Dict, List, Union
9import numpy as np
11from ase import Atom, Atoms
12from ase.calculators.calculator import kpts2mp
13from ase.calculators.singlepoint import SinglePointDFTCalculator
14from ase.constraints import FixAtoms, FixCartesian
15from ase.data import atomic_numbers
16from ase.io import ParseError
17from ase.units import Ang, fs
18from ase.utils import deprecated, lazymethod, lazyproperty, reader, writer
20v_unit = Ang / (1000.0 * fs)
22LINE_NOT_FOUND = object()
25class AimsParseError(Exception):
26 """Exception raised if an error occurs when parsing an Aims output file"""
28 def __init__(self, message):
29 self.message = message
30 super().__init__(self.message)
33# Read aims geometry files
34@reader
35def read_aims(fd, apply_constraints=True):
36 """Import FHI-aims geometry type files.
38 Reads the unit cell, atom positions and constraints from
39 a geometry.in file.
41 If geometric constraints (symmetry parameters) are present in the file,
42 include that information in atoms.info["symmetry_block"].
43 """
45 lines = fd.readlines()
46 return parse_geometry_lines(lines, apply_constraints=apply_constraints)
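# Illustrative usage (a sketch, not executed by this module): read_aims accepts
# a file name or an open file object via the @reader decorator. The file name
# below is hypothetical.
#
#     from ase.io.aims import read_aims
#     atoms = read_aims("geometry.in")
#     print(atoms.positions, atoms.constraints)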
49def parse_geometry_lines(lines, apply_constraints=True):
51 from ase import Atoms
52 from ase.constraints import (FixAtoms, FixCartesian,
53 FixCartesianParametricRelations,
54 FixScaledParametricRelations)
56 atoms = Atoms()
58 positions = []
59 cell = []
60 symbols = []
61 velocities = []
62 magmoms = []
63 symmetry_block = []
64 charges = []
65 fix = []
66 fix_cart = []
67 xyz = np.array([0, 0, 0])
68 i = -1
69 n_periodic = -1
70 periodic = np.array([False, False, False])
71 cart_positions, scaled_positions = False, False
72 for line in lines:
73 inp = line.split()
74 if inp == []:
75 continue
76 if inp[0] in ["atom", "atom_frac"]:
78 if inp[0] == "atom":
79 cart_positions = True
80 else:
81 scaled_positions = True
83 if xyz.all():
84 fix.append(i)
85 elif xyz.any():
86 fix_cart.append(FixCartesian(i, xyz))
87 floatvect = float(inp[1]), float(inp[2]), float(inp[3])
88 positions.append(floatvect)
89 symbols.append(inp[4])
90 magmoms.append(0.0)
91 charges.append(0.0)
92 xyz = np.array([0, 0, 0])
93 i += 1
95 elif inp[0] == "lattice_vector":
96 floatvect = float(inp[1]), float(inp[2]), float(inp[3])
97 cell.append(floatvect)
98 n_periodic = n_periodic + 1
99 periodic[n_periodic] = True
101 elif inp[0] == "initial_moment":
102 magmoms[-1] = float(inp[1])
104 elif inp[0] == "initial_charge":
105 charges[-1] = float(inp[1])
107 elif inp[0] == "constrain_relaxation":
108 if inp[1] == ".true.":
109 fix.append(i)
110 elif inp[1] == "x":
111 xyz[0] = 1
112 elif inp[1] == "y":
113 xyz[1] = 1
114 elif inp[1] == "z":
115 xyz[2] = 1
117 elif inp[0] == "velocity":
118 floatvect = [v_unit * float(x) for x in inp[1:4]]
119 velocities.append(floatvect)
121 elif inp[0] in [
122 "symmetry_n_params",
123 "symmetry_params",
124 "symmetry_lv",
125 "symmetry_frac",
126 ]:
127 symmetry_block.append(" ".join(inp))
129 if xyz.all():
130 fix.append(i)
131 elif xyz.any():
132 fix_cart.append(FixCartesian(i, xyz))
134 if cart_positions and scaled_positions:
135 raise Exception(
136 "Can't specify atom positions with mixture of "
137 "Cartesian and fractional coordinates"
138 )
139 elif scaled_positions and periodic.any():
140 atoms = Atoms(
141 symbols,
142 scaled_positions=positions,
143 cell=cell,
144 pbc=periodic)
145 else:
146 atoms = Atoms(symbols, positions)
148 if len(velocities) > 0:
149 if len(velocities) != len(positions):
150 raise Exception(
151 "Number of positions and velocities have to coincide.")
152 atoms.set_velocities(velocities)
154 fix_params = []
156 if len(symmetry_block) > 5:
157 params = symmetry_block[1].split()[1:]
159 lattice_expressions = []
160 lattice_params = []
162 atomic_expressions = []
163 atomic_params = []
165 n_lat_param = int(symmetry_block[0].split(" ")[2])
167 lattice_params = params[:n_lat_param]
168 atomic_params = params[n_lat_param:]
170 for ll, line in enumerate(symmetry_block[2:]):
171 expression = " ".join(line.split(" ")[1:])
172 if ll < 3:
173 lattice_expressions += expression.split(",")
174 else:
175 atomic_expressions += expression.split(",")
177 fix_params.append(
178 FixCartesianParametricRelations.from_expressions(
179 list(range(3)),
180 lattice_params,
181 lattice_expressions,
182 use_cell=True,
183 )
184 )
186 fix_params.append(
187 FixScaledParametricRelations.from_expressions(
188 list(range(len(atoms))), atomic_params, atomic_expressions
189 )
190 )
192 if any(magmoms):
193 atoms.set_initial_magnetic_moments(magmoms)
194 if any(charges):
195 atoms.set_initial_charges(charges)
197 if periodic.any():
198 atoms.set_cell(cell)
199 atoms.set_pbc(periodic)
200 if len(fix):
201 atoms.set_constraint([FixAtoms(indices=fix)] + fix_cart + fix_params)
202 else:
203 atoms.set_constraint(fix_cart + fix_params)
205 if fix_params and apply_constraints:
206 atoms.set_positions(atoms.get_positions())
207 return atoms
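# Illustrative example (a sketch with fabricated input lines):
# parse_geometry_lines works on a list of geometry.in lines held in memory,
# e.g. a single-atom periodic Fe cell with an initial magnetic moment.
#
#     lines = [
#         "lattice_vector 2.86 0.00 0.00\n",
#         "lattice_vector 0.00 2.86 0.00\n",
#         "lattice_vector 0.00 0.00 2.86\n",
#         "atom_frac 0.0 0.0 0.0 Fe\n",
#         "  initial_moment 2.0\n",
#     ]
#     atoms = parse_geometry_lines(lines)
#     print(atoms.get_initial_magnetic_moments())  # [2.]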
210def get_aims_header():
211 """Returns the header for aims input files"""
212 lines = ["#" + "=" * 79]
213 for line in [
214 "Created using the Atomic Simulation Environment (ASE)",
215 time.asctime(),
216 ]:
217 lines.append("# " + line + "\n")
218 return lines
221def _write_velocities_alias(args: List, kwargs: Dict[str, Any]) -> bool:
222 arg_position = 5
223 if len(args) > arg_position and args[arg_position]:
224 args[arg_position - 1] = True
225 elif kwargs.get("velocities", False):
226 if len(args) < arg_position:
227 kwargs["write_velocities"] = True
228 else:
229 args[arg_position - 1] = True
230 else:
231 return False
232 return True
235# Write aims geometry files
236@deprecated(
237 "Use of `velocities` is deprecated, please use `write_velocities`",
238 category=FutureWarning,
239 callback=_write_velocities_alias,
240)
241@writer
242def write_aims(
243 fd,
244 atoms,
245 scaled=False,
246 geo_constrain=False,
247 write_velocities=False,
248 velocities=False,
249 ghosts=None,
250 info_str=None,
251 wrap=False,
252):
253 """Method to write FHI-aims geometry files.
255 Writes the atom positions and constraints (FixAtoms and FixCartesian
256 are supported).
258 Args:
259 fd: file object
260 File to output structure to
261 atoms: ase.atoms.Atoms
262 structure to output to the file
263 scaled: bool
264 If True use fractional coordinates instead of Cartesian coordinates
265 geo_constrain: bool
266 If True add the parametric constraints defined in:
267 :arxiv:`1908.01610`
268 write_velocities: bool
269 If True add the atomic velocity vectors to the file
270 velocities: bool
271 NOT AN ARRAY OF VELOCITIES, but the legacy version of
272 `write_velocities`
273 ghosts: list of int
274 Per-atom flags; atoms flagged with 1 are written as ghost ("empty") sites
275 info_str: str
276 A string to be added to the header of the file
277 wrap: bool
278 Wrap atom positions to cell before writing
280 .. deprecated:: 3.23.0
281 Use of ``velocities`` is deprecated, please use ``write_velocities``.
282 """
284 from ase.constraints import FixAtoms, FixCartesian
286 if scaled and not np.all(atoms.pbc):
287 raise ValueError(
288 "Requesting scaled for a calculation where scaled=True, but "
289 "the system is not periodic")
291 if geo_constrain:
292 if not scaled and np.all(atoms.pbc):
293 warnings.warn(
294 "Setting scaled to True because a symmetry_block is detected."
295 )
296 scaled = True
297 elif not np.all(atoms.pbc):
298 warnings.warn(
299 "Parameteric constraints can only be used in periodic systems."
300 )
301 geo_constrain = False
303 for line in get_aims_header():
304 fd.write(line + "\n")
306 # If writing additional information is requested via info_str:
307 if info_str is not None:
308 fd.write("\n# Additional information:\n")
309 if isinstance(info_str, list):
310 fd.write("\n".join([f"# {s}" for s in info_str]))
311 else:
312 fd.write(f"# {info_str}")
313 fd.write("\n")
315 fd.write("#=======================================================\n")
317 i = 0
318 if atoms.get_pbc().any():
319 for n, vector in enumerate(atoms.get_cell()):
320 fd.write("lattice_vector ")
321 for i in range(3):
322 fd.write("%16.16f " % vector[i])
323 fd.write("\n")
324 fix_cart = np.zeros([len(atoms), 3])
326 if atoms.constraints:
327 for constr in atoms.constraints:
328 if isinstance(constr, FixAtoms):
329 fix_cart[constr.index] = [1, 1, 1]
330 elif isinstance(constr, FixCartesian):
331 fix_cart[constr.index] = -constr.mask.astype(int) + 1
333 if ghosts is None:
334 ghosts = np.zeros(len(atoms))
335 else:
336 assert len(ghosts) == len(atoms)
338 wrap = wrap and not geo_constrain
339 scaled_positions = atoms.get_scaled_positions(wrap=wrap)
341 for i, atom in enumerate(atoms):
342 if ghosts[i] == 1:
343 atomstring = "empty "
344 elif scaled:
345 atomstring = "atom_frac "
346 else:
347 atomstring = "atom "
348 fd.write(atomstring)
349 if scaled:
350 for pos in scaled_positions[i]:
351 fd.write("%16.16f " % pos)
352 else:
353 for pos in atom.position:
354 fd.write("%16.16f " % pos)
355 fd.write(atom.symbol)
356 fd.write("\n")
357 # (1) all coords are constrained:
358 if fix_cart[i].all():
359 fd.write(" constrain_relaxation .true.\n")
360 # (2) some coords are constrained:
361 elif fix_cart[i].any():
362 xyz = fix_cart[i]
363 for n in range(3):
364 if xyz[n]:
365 fd.write(f" constrain_relaxation {'xyz'[n]}\n")
366 if atom.charge:
367 fd.write(" initial_charge %16.6f\n" % atom.charge)
368 if atom.magmom:
369 fd.write(" initial_moment %16.6f\n" % atom.magmom)
371 if write_velocities and atoms.get_velocities() is not None:
372 fd.write(
373 " velocity {:.16f} {:.16f} {:.16f}\n".format(
374 *atoms.get_velocities()[i] / v_unit
375 )
376 )
378 if geo_constrain:
379 for line in get_sym_block(atoms):
380 fd.write(line)
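# Illustrative usage (a sketch; the file name is hypothetical):
#
#     from ase.build import molecule
#     write_aims("geometry.in", molecule("H2O"), info_str="water monomer")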
383def get_sym_block(atoms):
384 """Get symmetry block for Parametric constraints in atoms.constraints"""
385 from ase.constraints import (FixCartesianParametricRelations,
386 FixScaledParametricRelations)
388 # Initialize param/expressions lists
389 atomic_sym_params = []
390 lv_sym_params = []
391 atomic_param_constr = np.zeros((len(atoms),), dtype="<U100")
392 lv_param_constr = np.zeros((3,), dtype="<U100")
394 # Populate param/expressions list
395 for constr in atoms.constraints:
396 if isinstance(constr, FixScaledParametricRelations):
397 atomic_sym_params += constr.params
399 if np.any(atomic_param_constr[constr.indices] != ""):
400 warnings.warn(
401 "multiple parametric constraints defined for the same "
402 "atom, using the last one defined"
403 )
405 atomic_param_constr[constr.indices] = [
406 ", ".join(expression) for expression in constr.expressions
407 ]
408 elif isinstance(constr, FixCartesianParametricRelations):
409 lv_sym_params += constr.params
411 if np.any(lv_param_constr[constr.indices] != ""):
412 warnings.warn(
413 "multiple parametric constraints defined for the same "
414 "lattice vector, using the last one defined"
415 )
417 lv_param_constr[constr.indices] = [
418 ", ".join(expression) for expression in constr.expressions
419 ]
421 if np.all(atomic_param_constr == "") and np.all(lv_param_constr == ""):
422 return []
424 # Check Constraint Parameters
425 if len(atomic_sym_params) != len(np.unique(atomic_sym_params)):
426 warnings.warn(
427 "Some parameters were used across constraints, they will be "
428 "combined in the aims calculations"
429 )
430 atomic_sym_params = np.unique(atomic_sym_params)
432 if len(lv_sym_params) != len(np.unique(lv_sym_params)):
433 warnings.warn(
434 "Some parameters were used across constraints, they will be "
435 "combined in the aims calculations"
436 )
437 lv_sym_params = np.unique(lv_sym_params)
439 if np.any(atomic_param_constr == ""):
440 raise OSError(
441 "FHI-aims input files require all atoms have defined parametric "
442 "constraints"
443 )
445 cell_inds = np.where(lv_param_constr == "")[0]
446 for ind in cell_inds:
447 lv_param_constr[ind] = "{:.16f}, {:.16f}, {:.16f}".format(
448 *atoms.cell[ind])
450 n_atomic_params = len(atomic_sym_params)
451 n_lv_params = len(lv_sym_params)
452 n_total_params = n_atomic_params + n_lv_params
454 sym_block = []
455 if n_total_params > 0:
456 sym_block.append("#" + "=" * 55 + "\n")
457 sym_block.append("# Parametric constraints\n")
458 sym_block.append("#" + "=" * 55 + "\n")
459 sym_block.append(
460 "symmetry_n_params {:d} {:d} {:d}\n".format(
461 n_total_params, n_lv_params, n_atomic_params
462 )
463 )
464 sym_block.append(
465 "symmetry_params %s\n" % " ".join(lv_sym_params + atomic_sym_params)
466 )
468 for constr in lv_param_constr:
469 sym_block.append(f"symmetry_lv {constr:s}\n")
471 for constr in atomic_param_constr:
472 sym_block.append(f"symmetry_frac {constr:s}\n")
473 return sym_block
476def format_aims_control_parameter(key, value, format="%s"):
477 """Format a line for the aims control.in
479 Parameters
480 ----------
481 key: str
482 Name of the parameter to format
483 value: Object
484 The value to pass to the parameter
485 format: str
486 Format string for the value
488 Returns
489 -------
490 str
491 The properly formatted line for the aims control.in
492 """
493 return f"{key :35s}" + (format % value) + "\n"
496# Write aims control.in files
497@writer
498def write_control(fd, atoms, parameters, verbose_header=False):
499 """Write the control.in file for FHI-aims
500 Parameters
501 ----------
502 fd: file object
503 The file object to write to
504 atoms: ase.atoms.Atoms
505 The Atoms object for the requested calculation
506 parameters: dict
507 The dictionary of all parameters for the calculation
508 verbose_header: bool
509 If True then explicitly list the parameters used to generate the
510 control.in file inside the header
511 """
513 parameters = dict(parameters)
514 lim = "#" + "=" * 79
516 if parameters["xc"] == "LDA":
517 parameters["xc"] = "pw-lda"
519 cubes = parameters.pop("cubes", None)
521 for line in get_aims_header():
522 fd.write(line + "\n")
524 if verbose_header:
525 fd.write("# \n# List of parameters used to initialize the calculator:")
526 for p, v in parameters.items():
527 s = f"# {p}:{v}\n"
528 fd.write(s)
529 fd.write(lim + "\n")
531 assert not ("kpts" in parameters and "k_grid" in parameters)
532 assert not ("smearing" in parameters and "occupation_type" in parameters)
534 for key, value in parameters.items():
535 if key == "kpts":
536 mp = kpts2mp(atoms, parameters["kpts"])
537 dk = 0.5 - 0.5 / np.array(mp)
538 fd.write(
539 format_aims_control_parameter(
540 "k_grid",
541 tuple(mp),
542 "%d %d %d"))
543 fd.write(
544 format_aims_control_parameter(
545 "k_offset",
546 tuple(dk),
547 "%f %f %f"))
548 elif key in ("species_dir", "tier"):
549 continue
550 elif key == "plus_u":
551 continue
552 elif key == "smearing":
553 name = parameters["smearing"][0].lower()
554 if name == "fermi-dirac":
555 name = "fermi"
556 width = parameters["smearing"][1]
557 if name == "methfessel-paxton":
558 order = parameters["smearing"][2]
559 order = " %d" % order
560 else:
561 order = ""
563 fd.write(
564 format_aims_control_parameter(
565 "occupation_type", (name, width, order), "%s %f%s"
566 )
567 )
568 elif key == "output":
569 for output_type in value:
570 fd.write(format_aims_control_parameter(key, output_type, "%s"))
571 elif key == "vdw_correction_hirshfeld" and value:
572 fd.write(format_aims_control_parameter(key, "", "%s"))
573 elif isinstance(value, bool):
574 fd.write(
575 format_aims_control_parameter(
576 key, str(value).lower(), ".%s."))
577 elif isinstance(value, (tuple, list)):
578 fd.write(
579 format_aims_control_parameter(
580 key, " ".join([str(x) for x in value]), "%s"
581 )
582 )
583 elif isinstance(value, str):
584 fd.write(format_aims_control_parameter(key, value, "%s"))
585 else:
586 fd.write(format_aims_control_parameter(key, value, "%r"))
588 if cubes:
589 cubes.write(fd)
591 fd.write(lim + "\n\n")
595 # dicts are ordered as of python 3.7
596 species_array = np.array(list(dict.fromkeys(atoms.symbols)))
597 # Grab the tier specification from the parameters. This may either
598 # be None, meaning the default should be used for all species, or a
599 # list of integers/None values giving a specific basis set size
600 # for each species in the calculation.
601 tier = parameters.pop("tier", None)
602 tier_array = np.full(len(species_array), tier)
603 # Path to species files for FHI-aims. These files contain specifications
604 # for the basis set sizes depending on which basis set tier is used.
605 species_dir = get_species_directory(parameters.get("species_dir"))
606 # Parse the species files for each species present in the calculation
607 # according to the tier of each species.
608 species_basis_dict = parse_species_path(
609 species_array=species_array, tier_array=tier_array,
610 species_dir=species_dir)
611 # Write the basis functions to be included for each species in the
612 # calculation into the control.in file (fd).
613 write_species(fd, species_basis_dict, parameters)
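# Illustrative usage (a sketch, assuming species_dir or $AIMS_SPECIES_DIR
# points at a valid FHI-aims species_defaults folder; file names and paths
# below are hypothetical):
#
#     from ase.build import bulk
#     parameters = {
#         "xc": "pbe",
#         "kpts": (4, 4, 4),
#         "species_dir": "/path/to/species_defaults/defaults_2020/light",
#     }
#     write_control("control.in", bulk("Si"), parameters)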
616def get_species_directory(species_dir=None):
617 """Get the directory where the basis set information is stored
619 If the requested directory does not exist then raise an Error
621 Parameters
622 ----------
623 species_dir: str
624 Requested directory to find the basis set info from. E.g.
625 `~/aims2022/FHIaims/species_defaults/defaults_2020/light`.
627 Returns
628 -------
629 Path
630 The Path to the requested or default species directory.
632 Raises
633 ------
634 RuntimeError
635 If neither the requested directory nor the default one is defined,
636 or if the directory does not exist.
637 """
638 if species_dir is None:
639 species_dir = os.environ.get("AIMS_SPECIES_DIR")
641 if species_dir is None:
642 raise RuntimeError(
643 "Missing species directory! Use species_dir "
644 + "parameter or set $AIMS_SPECIES_DIR environment variable."
645 )
647 species_path = Path(species_dir)
648 if not species_path.exists():
649 raise RuntimeError(
650 f"The requested species_dir {species_dir} does not exist")
652 return species_path
655def write_species(control_file_descriptor, species_basis_dict, parameters):
656 """Write species for the calculation depending on basis set size.
658 The calculation should include certain basis functions depending
659 on the numerical settings (light, tight, really tight) and the basis set
660 size (minimal, tier1, tier2, tier3, tier4). If the basis set size is not
661 given then a 'standard' basis set size is used for each numerical setting.
662 The species files are defined according to these standard basis set sizes
663 for the numerical settings in the FHI-aims repository.
665 Note, for FHI-aims in ASE, we don't explicitly give the numerical setting.
666 Instead we include the numerical setting in the species path: e.g. in
667 `~/aims2022/FHIaims/species_defaults/defaults_2020/light` the last folder,
668 `light`, is the numerical setting.
670 Example - a basis function might be commented out in the standard basis set
671 size, such as "# hydro 4 f 7.4", and this basis function should be
672 uncommented for another basis set size such as tier4.
674 Args:
675 control_file_descriptor: File descriptor for the control.in file into
676 which we need to write relevant basis functions to be included for
677 the calculation.
678 species_basis_dict: Dictionary where keys are the species symbols and
679 each value is a single string containing all the basis functions
680 to be included in the calculation.
681 parameters: Calculation parameters as a dict.
682 """
683 # Now for every species (key) in the species_basis_dict, save the
684 # relevant basis functions (values) from the species_basis_dict, by
685 # writing to the file handle (control_file_descriptor) given to this
686 # function.
687 for species_symbol, basis_set_text in species_basis_dict.items():
688 control_file_descriptor.write(basis_set_text)
689 if parameters.get("plus_u") is not None:
690 if species_symbol in parameters.plus_u:
691 control_file_descriptor.write(
692 f"plus_u {parameters.plus_u[species_symbol]} \n")
695def parse_species_path(species_array, tier_array, species_dir):
696 """Parse the species files for each species according to the tier given.
698 Args:
699 species_array: An array of species/element symbols present in the unit
700 cell (e.g. ['C', 'H'].)
701 tier_array: An array of None/integer values which define which basis
702 set size to use for each species/element in the calculation.
703 species_dir: Directory containing FHI-aims species files.
705 Returns:
706 Dictionary containing species as keys and the basis set specification
707 for each species as text as the value for the key.
708 """
709 if len(species_array) != len(tier_array):
710 raise ValueError(
711 f"The species array length: {len(species_array)}, "
712 f"is not the same as the tier_array length: {len(tier_array)}")
714 species_basis_dict = {}
716 for symbol, tier in zip(species_array, tier_array):
717 path = species_dir / f"{atomic_numbers[symbol]:02}_{symbol}_default"
718 # Open the species file:
719 with open(path, encoding="utf8") as species_file_handle:
720 # Read the species file into a string.
721 species_file_str = species_file_handle.read()
722 species_basis_dict[symbol] = manipulate_tiers(
723 species_file_str, tier)
724 return species_basis_dict
727def manipulate_tiers(species_string: str, tier: Union[None, int] = 1):
728 """Adds basis set functions based on the tier value.
730 This function takes in the species file as a string, it then searches
731 for relevant basis functions based on the tier value to include in a new
732 string that is returned.
734 Args:
735 species_string: species file (default) for a given numerical setting
736 (light, tight, really tight) given as a string.
737 tier: The basis set size. This will dictate which basis set functions
738 are included in the returned string.
740 Returns:
741 Basis set functions defined by the tier as a string.
742 """
743 if tier is None: # Then we use the default species file untouched.
744 return species_string
745 tier_pattern = r"(# \".* tier\" .*|# +Further.*)"
746 top, *tiers = re.split(tier_pattern, species_string)
747 tier_comments = tiers[::2]
748 tier_basis = tiers[1::2]
749 assert len(
750 tier_comments) == len(tier_basis), "Something wrong with splitting"
751 n_tiers = len(tier_comments)
752 assert tier <= n_tiers, f"Only {n_tiers} tiers available, you chose {tier}"
753 string_new = top
754 for i, (c, b) in enumerate(zip(tier_comments, tier_basis)):
755 b = re.sub(r"\n( *for_aux| *hydro| *ionic| *confined)", r"\n#\g<1>", b)
756 if i < tier:
757 b = re.sub(
758 r"\n#( *for_aux| *hydro| *ionic| *confined)", r"\n\g<1>", b)
759 string_new += c + b
760 return string_new
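# Illustrative example (a sketch with a fabricated, minimal species string):
# manipulate_tiers uncomments the basis functions of every tier up to the
# requested one and leaves later tiers commented out.
#
#     toy_species = (
#         'species Si\n'
#         '# "First tier" - improvements\n'
#         '#     hydro 3 d 4.2\n'
#         '# "Second tier" - improvements\n'
#         '#     hydro 4 f 6.2\n'
#     )
#     out = manipulate_tiers(toy_species, tier=1)
#     # "hydro 3 d 4.2" is now uncommented; "hydro 4 f 6.2" stays commented.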
763# Read aims.out files
764scalar_property_to_line_key = {
765 "free_energy": ["| Electronic free energy"],
766 "number_of_iterations": ["| Number of self-consistency cycles"],
767 "magnetic_moment": ["N_up - N_down"],
768 "n_atoms": ["| Number of atoms"],
769 "n_bands": [
770 "Number of Kohn-Sham states",
771 "Reducing total number of Kohn-Sham states",
772 "Reducing total number of Kohn-Sham states",
773 ],
774 "n_electrons": ["The structure contains"],
775 "n_kpts": ["| Number of k-points"],
776 "n_spins": ["| Number of spin channels"],
777 "electronic_temp": ["Occupation type:"],
778 "fermi_energy": ["| Chemical potential (Fermi level)"],
779}
782class AimsOutChunk:
783 """Base class for AimsOutChunks"""
785 def __init__(self, lines):
786 """Constructor
788 Parameters
789 ----------
790 lines: list of str
791 The set of lines from the output file that encompasses either
792 a single structure within a trajectory or
793 general information about the calculation (header)
794 """
795 self.lines = lines
797 def reverse_search_for(self, keys, line_start=0):
798 """Find the last time one of the keys appears in self.lines
800 Parameters
801 ----------
802 keys: list of str
803 The key strings to search for in self.lines
804 line_start: int
805 The lowest index in self.lines to start the search from
807 Returns
808 -------
809 int
810 The index of the last line in which one of the keys appears
811 """
812 for ll, line in enumerate(self.lines[line_start:][::-1]):
813 if any(key in line for key in keys):
814 return len(self.lines) - ll - 1
816 return LINE_NOT_FOUND
818 def search_for_all(self, key, line_start=0, line_end=-1):
819 """Find the all times the key appears in self.lines
821 Parameters
822 ----------
823 key: str
824 The key string to search for in self.lines
825 line_start: int
826 The first line to start the search from
827 line_end: int
828 The last line to end the search at
830 Returns
831 -------
832 list of ints
833 All times the key appears in the lines
834 """
835 line_index = []
836 for ll, line in enumerate(self.lines[line_start:line_end]):
837 if key in line:
838 line_index.append(ll + line_start)
839 return line_index
841 def parse_scalar(self, property):
842 """Parse a scalar property from the chunk
844 Parameters
845 ----------
846 property: str
847 The property key to parse
849 Returns
850 -------
851 float
852 The scalar value of the property
853 """
854 line_start = self.reverse_search_for(
855 scalar_property_to_line_key[property])
857 if line_start == LINE_NOT_FOUND:
858 return None
860 line = self.lines[line_start]
861 return float(line.split(":")[-1].strip().split()[0])
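# Illustrative example (a sketch with fabricated output lines): the chunk
# helpers operate on plain lists of strings.
#
#     chunk = AimsOutChunk(["| Number of atoms : 2",
#                           "| Number of spin channels : 1"])
#     chunk.reverse_search_for(["| Number of atoms"])  # -> 0
#     chunk.parse_scalar("n_atoms")                    # -> 2.0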
864class AimsOutHeaderChunk(AimsOutChunk):
865 """The header of the aims.out file containint general information"""
867 def __init__(self, lines):
868 """Constructor
870 Parameters
871 ----------
872 lines: list of str
873 The lines inside the aims.out header
874 """
875 super().__init__(lines)
876 self._k_points = None
877 self._k_point_weights = None
879 @lazyproperty
880 def constraints(self):
881 """Parse the constraints from the aims.out file
883 Constraints for the lattice vectors are not supported.
884 """
886 line_inds = self.search_for_all("Found relaxation constraint for atom")
887 if len(line_inds) == 0:
888 return []
890 fix = []
891 fix_cart = []
892 for ll in line_inds:
893 line = self.lines[ll]
894 xyz = [0, 0, 0]
895 ind = int(line.split()[5][:-1]) - 1
896 if "All coordinates fixed" in line:
897 if ind not in fix:
898 fix.append(ind)
899 if "coordinate fixed" in line:
900 coord = line.split()[6]
901 if coord == "x":
902 xyz[0] = 1
903 elif coord == "y":
904 xyz[1] = 1
905 elif coord == "z":
906 xyz[2] = 1
907 keep = True
908 for n, c in enumerate(fix_cart):
909 if ind == c.index:
910 keep = False
911 break
912 if keep:
913 fix_cart.append(FixCartesian(ind, xyz))
914 else:
915 fix_cart[n].mask[xyz.index(1)] = 0
916 if len(fix) > 0:
917 fix_cart.append(FixAtoms(indices=fix))
919 return fix_cart
921 @lazyproperty
922 def initial_cell(self):
923 """Parse the initial cell from the aims.out file"""
924 line_start = self.reverse_search_for(["| Unit cell:"])
925 if line_start == LINE_NOT_FOUND:
926 return None
928 return [
929 [float(inp) for inp in line.split()[-3:]]
930 for line in self.lines[line_start + 1:line_start + 4]
931 ]
933 @lazyproperty
934 def initial_atoms(self):
935 """Create an atoms object for the initial geometry.in structure
936 from the aims.out file"""
937 line_start = self.reverse_search_for(["Atomic structure:"])
938 if line_start == LINE_NOT_FOUND:
939 raise AimsParseError(
940 "No information about the structure in the chunk.")
942 line_start += 2
944 cell = self.initial_cell
945 positions = np.zeros((self.n_atoms, 3))
946 symbols = [""] * self.n_atoms
947 for ll, line in enumerate(
948 self.lines[line_start:line_start + self.n_atoms]):
949 inp = line.split()
950 positions[ll, :] = [float(pos) for pos in inp[4:7]]
951 symbols[ll] = inp[3]
953 atoms = Atoms(symbols=symbols, positions=positions)
955 if cell:
956 atoms.set_cell(cell)
957 atoms.set_pbc([True, True, True])
958 atoms.set_constraint(self.constraints)
960 return atoms
962 @lazyproperty
963 def is_md(self):
964 """Determine if calculation is a molecular dynamics calculation"""
965 return LINE_NOT_FOUND != self.reverse_search_for(
966 ["Complete information for previous time-step:"]
967 )
969 @lazyproperty
970 def is_relaxation(self):
971 """Determine if the calculation is a geometry optimization or not"""
972 return LINE_NOT_FOUND != self.reverse_search_for(
973 ["Geometry relaxation:"])
975 @lazymethod
976 def _parse_k_points(self):
977 """Get the list of k-points used in the calculation"""
978 n_kpts = self.parse_scalar("n_kpts")
979 if n_kpts is None:
980 return {
981 "k_points": None,
982 "k_point_weights": None,
983 }
984 n_kpts = int(n_kpts)
986 line_start = self.reverse_search_for(["| K-points in task"])
987 line_end = self.reverse_search_for(["| k-point:"])
988 if (
989 (line_start == LINE_NOT_FOUND)
990 or (line_end == LINE_NOT_FOUND)
991 or (line_end - line_start != n_kpts)
992 ):
993 return {
994 "k_points": None,
995 "k_point_weights": None,
996 }
998 k_points = np.zeros((n_kpts, 3))
999 k_point_weights = np.zeros(n_kpts)
1000 for kk, line in enumerate(self.lines[line_start + 1:line_end + 1]):
1001 k_points[kk] = [float(inp) for inp in line.split()[4:7]]
1002 k_point_weights[kk] = float(line.split()[-1])
1004 return {
1005 "k_points": k_points,
1006 "k_point_weights": k_point_weights,
1007 }
1009 @lazyproperty
1010 def n_atoms(self):
1011 """The number of atoms for the material"""
1012 n_atoms = self.parse_scalar("n_atoms")
1013 if n_atoms is None:
1014 raise AimsParseError(
1015 "No information about the number of atoms in the header."
1016 )
1017 return int(n_atoms)
1019 @lazyproperty
1020 def n_bands(self):
1021 """The number of Kohn-Sham states for the chunk"""
1022 line_start = self.reverse_search_for(
1023 scalar_property_to_line_key["n_bands"])
1025 if line_start == LINE_NOT_FOUND:
1026 raise AimsParseError(
1027 "No information about the number of Kohn-Sham states "
1028 "in the header.")
1030 line = self.lines[line_start]
1031 if "| Number of Kohn-Sham states" in line:
1032 return int(line.split(":")[-1].strip().split()[0])
1034 return int(line.split()[-1].strip()[:-1])
1036 @lazyproperty
1037 def n_electrons(self):
1038 """The number of electrons for the chunk"""
1039 line_start = self.reverse_search_for(
1040 scalar_property_to_line_key["n_electrons"])
1042 if line_start == LINE_NOT_FOUND:
1043 raise AimsParseError(
1044 "No information about the number of electrons in the header."
1045 )
1047 line = self.lines[line_start]
1048 return int(float(line.split()[-2]))
1050 @lazyproperty
1051 def n_k_points(self):
1052 """The number of k_ppoints for the calculation"""
1053 n_kpts = self.parse_scalar("n_kpts")
1054 if n_kpts is None:
1055 return None
1057 return int(n_kpts)
1059 @lazyproperty
1060 def n_spins(self):
1061 """The number of spin channels for the chunk"""
1062 n_spins = self.parse_scalar("n_spins")
1063 if n_spins is None:
1064 raise AimsParseError(
1065 "No information about the number of spin "
1066 "channels in the header.")
1067 return int(n_spins)
1069 @lazyproperty
1070 def electronic_temperature(self):
1071 """The electronic temperature for the chunk"""
1072 line_start = self.reverse_search_for(
1073 scalar_property_to_line_key["electronic_temp"]
1074 )
1075 if line_start == LINE_NOT_FOUND:
1076 return 0.10
1078 line = self.lines[line_start]
1079 return float(line.split("=")[-1].strip().split()[0])
1081 @lazyproperty
1082 def k_points(self):
1083 """All k-points listed in the calculation"""
1084 return self._parse_k_points()["k_points"]
1086 @lazyproperty
1087 def k_point_weights(self):
1088 """The k-point weights for the calculation"""
1089 return self._parse_k_points()["k_point_weights"]
1091 @lazyproperty
1092 def header_summary(self):
1093 """Dictionary summarizing the information inside the header"""
1094 return {
1095 "initial_atoms": self.initial_atoms,
1096 "initial_cell": self.initial_cell,
1097 "constraints": self.constraints,
1098 "is_relaxation": self.is_relaxation,
1099 "is_md": self.is_md,
1100 "n_atoms": self.n_atoms,
1101 "n_bands": self.n_bands,
1102 "n_electrons": self.n_electrons,
1103 "n_spins": self.n_spins,
1104 "electronic_temperature": self.electronic_temperature,
1105 "n_k_points": self.n_k_points,
1106 "k_points": self.k_points,
1107 "k_point_weights": self.k_point_weights,
1108 }
1111class AimsOutCalcChunk(AimsOutChunk):
1112 """A part of the aims.out file correponding to a single structure"""
1114 def __init__(self, lines, header):
1115 """Constructor
1117 Parameters
1118 ----------
1119 lines: list of str
1120 The lines used for the structure
1121 header: dict
1122 A summary of the relevant information from the aims.out header
1123 """
1124 super().__init__(lines)
1125 self._header = header.header_summary
1127 @lazymethod
1128 def _parse_atoms(self):
1129 """Create an atoms object for the subsequent structures
1130 calculated in the aims.out file"""
1131 start_keys = [
1132 "Atomic structure (and velocities) as used in the preceding "
1133 "time step",
1134 "Updated atomic structure",
1135 "Atomic structure that was used in the preceding time step of "
1136 "the wrapper",
1137 ]
1138 line_start = self.reverse_search_for(start_keys)
1139 if line_start == LINE_NOT_FOUND:
1140 return self.initial_atoms
1142 line_start += 1
1144 line_end = self.reverse_search_for(
1145 [
1146 'Next atomic structure:',
1147 'Writing the current geometry to file "geometry.in.next_step"'
1148 ],
1149 line_start
1150 )
1151 if line_end == LINE_NOT_FOUND:
1152 line_end = len(self.lines)
1154 cell = []
1155 velocities = []
1156 atoms = Atoms()
1157 for line in self.lines[line_start:line_end]:
1158 if "lattice_vector " in line:
1159 cell.append([float(inp) for inp in line.split()[1:]])
1160 elif "atom " in line:
1161 line_split = line.split()
1162 atoms.append(Atom(line_split[4], tuple(
1163 [float(inp) for inp in line_split[1:4]])))
1164 elif "velocity " in line:
1165 velocities.append([float(inp) for inp in line.split()[1:]])
1167 assert len(atoms) == self.n_atoms
1168 assert (len(velocities) == self.n_atoms) or (len(velocities) == 0)
1169 if len(cell) == 3:
1170 atoms.set_cell(np.array(cell))
1171 atoms.set_pbc([True, True, True])
1172 elif len(cell) != 0:
1173 raise AimsParseError(
1174 "Parsed geometry has incorrect number of lattice vectors."
1175 )
1177 if len(velocities) > 0:
1178 atoms.set_velocities(np.array(velocities))
1179 atoms.set_constraint(self.constraints)
1181 return atoms
1183 @lazyproperty
1184 def forces(self):
1185 """Parse the forces from the aims.out file"""
1186 line_start = self.reverse_search_for(["Total atomic forces"])
1187 if line_start == LINE_NOT_FOUND:
1188 return
1190 line_start += 1
1192 return np.array(
1193 [
1194 [float(inp) for inp in line.split()[-3:]]
1195 for line in self.lines[line_start:line_start + self.n_atoms]
1196 ]
1197 )
1199 @lazyproperty
1200 def stresses(self):
1201 """Parse the stresses from the aims.out file"""
1202 line_start = self.reverse_search_for(
1203 ["Per atom stress (eV) used for heat flux calculation"]
1204 )
1205 if line_start == LINE_NOT_FOUND:
1206 return None
1207 line_start += 3
1208 stresses = []
1209 for line in self.lines[line_start:line_start + self.n_atoms]:
1210 xx, yy, zz, xy, xz, yz = (float(d) for d in line.split()[2:8])
1211 stresses.append([xx, yy, zz, yz, xz, xy])
1213 return np.array(stresses)
1215 @lazyproperty
1216 def stress(self):
1217 """Parse the stress from the aims.out file"""
1218 from ase.stress import full_3x3_to_voigt_6_stress
1220 line_start = self.reverse_search_for(
1221 [
1222 "Analytical stress tensor - Symmetrized",
1223 "Numerical stress tensor",
1224 ]
1226 ) # Offset to relevant lines
1227 if line_start == LINE_NOT_FOUND:
1228 return
1230 stress = [
1231 [float(inp) for inp in line.split()[2:5]]
1232 for line in self.lines[line_start + 5:line_start + 8]
1233 ]
1234 return full_3x3_to_voigt_6_stress(stress)
1236 @lazyproperty
1237 def is_metallic(self):
1238 """Checks the outputfile to see if the chunk corresponds
1239 to a metallic system"""
1240 line_start = self.reverse_search_for(
1241 ["material is metallic within the approximate finite "
1242 "broadening function (occupation_type)"])
1243 return line_start != LINE_NOT_FOUND
1245 @lazyproperty
1246 def energy(self):
1247 """Parse the energy from the aims.out file"""
1248 atoms = self._parse_atoms()
1250 if np.all(atoms.pbc) and self.is_metallic:
1251 line_ind = self.reverse_search_for(["Total energy corrected"])
1252 else:
1253 line_ind = self.reverse_search_for(["Total energy uncorrected"])
1254 if line_ind == LINE_NOT_FOUND:
1255 raise AimsParseError("No energy is associated with the structure.")
1257 return float(self.lines[line_ind].split()[5])
1259 @lazyproperty
1260 def dipole(self):
1261 """Parse the electric dipole moment from the aims.out file."""
1262 line_start = self.reverse_search_for(["Total dipole moment [eAng]"])
1263 if line_start == LINE_NOT_FOUND:
1264 return
1266 line = self.lines[line_start]
1267 return np.array([float(inp) for inp in line.split()[6:9]])
1269 @lazyproperty
1270 def dielectric_tensor(self):
1271 """Parse the dielectric tensor from the aims.out file"""
1272 line_start = self.reverse_search_for(["PARSE DFPT_dielectric_tensor"])
1273 if line_start == LINE_NOT_FOUND:
1274 return
1276 # we should find the tensor in the next three lines:
1277 lines = self.lines[line_start + 1:line_start + 4]
1279 # make ndarray and return
1280 return np.array([np.fromstring(line, sep=' ') for line in lines])
1282 @lazyproperty
1283 def polarization(self):
1284 """ Parse the polarization vector from the aims.out file"""
1285 line_start = self.reverse_search_for(["| Cartesian Polarization"])
1286 if line_start == LINE_NOT_FOUND:
1287 return
1288 line = self.lines[line_start]
1289 return np.array([float(s) for s in line.split()[-3:]])
1291 @lazymethod
1292 def _parse_hirshfeld(self):
1293 """Parse the Hirshfled charges volumes, and dipole moments from the
1294 ouput"""
1295 atoms = self._parse_atoms()
1297 line_start = self.reverse_search_for(
1298 ["Performing Hirshfeld analysis of fragment charges and moments."]
1299 )
1300 if line_start == LINE_NOT_FOUND:
1301 return {
1302 "charges": None,
1303 "volumes": None,
1304 "atomic_dipoles": None,
1305 "dipole": None,
1306 }
1308 line_inds = self.search_for_all("Hirshfeld charge", line_start, -1)
1309 hirshfeld_charges = np.array(
1310 [float(self.lines[ind].split(":")[1]) for ind in line_inds]
1311 )
1313 line_inds = self.search_for_all("Hirshfeld volume", line_start, -1)
1314 hirshfeld_volumes = np.array(
1315 [float(self.lines[ind].split(":")[1]) for ind in line_inds]
1316 )
1318 line_inds = self.search_for_all(
1319 "Hirshfeld dipole vector", line_start, -1)
1320 hirshfeld_atomic_dipoles = np.array(
1321 [
1322 [float(inp) for inp in self.lines[ind].split(":")[1].split()]
1323 for ind in line_inds
1324 ]
1325 )
1327 if not np.any(atoms.pbc):
1328 hirshfeld_dipole = np.sum(
1329 hirshfeld_charges.reshape((-1, 1)) * atoms.get_positions(),
1330 axis=1,
1331 )
1332 else:
1333 hirshfeld_dipole = None
1334 return {
1335 "charges": hirshfeld_charges,
1336 "volumes": hirshfeld_volumes,
1337 "atomic_dipoles": hirshfeld_atomic_dipoles,
1338 "dipole": hirshfeld_dipole,
1339 }
1341 @lazymethod
1342 def _parse_eigenvalues(self):
1343 """Parse the eigenvalues and occupancies of the system. If eigenvalue
1344 for a particular k-point is not present in the output file
1345 then set it to np.nan
1346 """
1348 atoms = self._parse_atoms()
1350 line_start = self.reverse_search_for(["Writing Kohn-Sham eigenvalues."])
1351 if line_start == LINE_NOT_FOUND:
1352 return {"eigenvalues": None, "occupancies": None}
1354 line_end_1 = self.reverse_search_for(
1355 ["Self-consistency cycle converged."], line_start
1356 )
1357 line_end_2 = self.reverse_search_for(
1358 [
1359 "What follows are estimated values for band gap, "
1360 "HOMO, LUMO, etc.",
1361 "Current spin moment of the entire structure :",
1362 "Highest occupied state (VBM)"
1363 ],
1364 line_start,
1365 )
1366 if line_end_1 == LINE_NOT_FOUND:
1367 line_end = line_end_2
1368 elif line_end_2 == LINE_NOT_FOUND:
1369 line_end = line_end_1
1370 else:
1371 line_end = min(line_end_1, line_end_2)
1373 n_kpts = self.n_k_points if np.all(atoms.pbc) else 1
1374 if n_kpts is None:
1375 return {"eigenvalues": None, "occupancies": None}
1377 eigenvalues = np.full((n_kpts, self.n_bands, self.n_spins), np.nan)
1378 occupancies = np.full((n_kpts, self.n_bands, self.n_spins), np.nan)
1380 occupation_block_start = self.search_for_all(
1381 "State Occupation Eigenvalue [Ha] Eigenvalue [eV]",
1382 line_start,
1383 line_end,
1384 )
1385 kpt_def = self.search_for_all("K-point: ", line_start, line_end)
1387 if len(kpt_def) > 0:
1388 kpt_inds = [int(self.lines[ll].split()[1]) - 1 for ll in kpt_def]
1389 elif (self.n_k_points is None) or (self.n_k_points == 1):
1390 kpt_inds = [0]
1391 else:
1392 raise ParseError("Cannot find k-point definitions")
1394 assert len(kpt_inds) == len(occupation_block_start)
1395 spins = [0] * len(occupation_block_start)
1397 if self.n_spins == 2:
1398 spin_def = self.search_for_all("Spin-", line_start, line_end)
1399 assert len(spin_def) == len(occupation_block_start)
1401 spins = [int("Spin-down eigenvalues:" in self.lines[ll])
1402 for ll in spin_def]
1404 for occ_start, kpt_ind, spin in zip(
1405 occupation_block_start, kpt_inds, spins):
1406 for ll, line in enumerate(
1407 self.lines[occ_start + 1:occ_start + self.n_bands + 1]
1408 ):
1409 if "***" in line:
1410 warn_msg = f"The {ll+1}th eigenvalue for the "
1411 "{kpt_ind+1}th k-point and {spin}th channels could "
1412 "not be read (likely too large to be printed "
1413 "in the output file)"
1414 warnings.warn(warn_msg)
1415 continue
1416 split_line = line.split()
1417 eigenvalues[kpt_ind, ll, spin] = float(split_line[3])
1418 occupancies[kpt_ind, ll, spin] = float(split_line[1])
1419 return {"eigenvalues": eigenvalues, "occupancies": occupancies}
1421 @lazyproperty
1422 def atoms(self):
1423 """Convert AimsOutChunk to Atoms object and add all non-standard
1424outputs to atoms.info"""
1425 atoms = self._parse_atoms()
1427 atoms.calc = SinglePointDFTCalculator(
1428 atoms,
1429 energy=self.energy,
1430 free_energy=self.free_energy,
1431 forces=self.forces,
1432 stress=self.stress,
1433 stresses=self.stresses,
1434 magmom=self.magmom,
1435 dipole=self.dipole,
1436 dielectric_tensor=self.dielectric_tensor,
1437 polarization=self.polarization,
1438 )
1439 return atoms
1441 @property
1442 def results(self):
1443 """Convert an AimsOutChunk to a Results Dictionary"""
1444 results = {
1445 "energy": self.energy,
1446 "free_energy": self.free_energy,
1447 "forces": self.forces,
1448 "stress": self.stress,
1449 "stresses": self.stresses,
1450 "magmom": self.magmom,
1451 "dipole": self.dipole,
1452 "fermi_energy": self.E_f,
1453 "n_iter": self.n_iter,
1454 "hirshfeld_charges": self.hirshfeld_charges,
1455 "hirshfeld_dipole": self.hirshfeld_dipole,
1456 "hirshfeld_volumes": self.hirshfeld_volumes,
1457 "hirshfeld_atomic_dipoles": self.hirshfeld_atomic_dipoles,
1458 "eigenvalues": self.eigenvalues,
1459 "occupancies": self.occupancies,
1460 "dielectric_tensor": self.dielectric_tensor,
1461 "polarization": self.polarization,
1462 }
1464 return {
1465 key: value for key,
1466 value in results.items() if value is not None}
1468 # Properties from the aims.out header
1469 @lazyproperty
1470 def initial_atoms(self):
1471 """The initial structure defined in the geoemtry.in file"""
1472 return self._header["initial_atoms"]
1474 @lazyproperty
1475 def initial_cell(self):
1476 """The initial lattice vectors defined in the geoemtry.in file"""
1477 return self._header["initial_cell"]
1479 @lazyproperty
1480 def constraints(self):
1481 """The relaxation constraints for the calculation"""
1482 return self._header["constraints"]
1484 @lazyproperty
1485 def n_atoms(self):
1486 """The number of atoms for the material"""
1487 return self._header["n_atoms"]
1489 @lazyproperty
1490 def n_bands(self):
1491 """The number of Kohn-Sham states for the chunk"""
1492 return self._header["n_bands"]
1494 @lazyproperty
1495 def n_electrons(self):
1496 """The number of electrons for the chunk"""
1497 return self._header["n_electrons"]
1499 @lazyproperty
1500 def n_spins(self):
1501 """The number of spin channels for the chunk"""
1502 return self._header["n_spins"]
1504 @lazyproperty
1505 def electronic_temperature(self):
1506 """The electronic temperature for the chunk"""
1507 return self._header["electronic_temperature"]
1509 @lazyproperty
1510 def n_k_points(self):
1511 """The number of electrons for the chunk"""
1512 return self._header["n_k_points"]
1514 @lazyproperty
1515 def k_points(self):
1516 """The number of spin channels for the chunk"""
1517 return self._header["k_points"]
1519 @lazyproperty
1520 def k_point_weights(self):
1521 """k_point_weights electronic temperature for the chunk"""
1522 return self._header["k_point_weights"]
1524 @lazyproperty
1525 def free_energy(self):
1526 """The free energy for the chunk"""
1527 return self.parse_scalar("free_energy")
1529 @lazyproperty
1530 def n_iter(self):
1531 """The number of SCF iterations needed to converge the SCF cycle for
1532the chunk"""
1533 return self.parse_scalar("number_of_iterations")
1535 @lazyproperty
1536 def magmom(self):
1537 """The magnetic moment for the chunk"""
1538 return self.parse_scalar("magnetic_moment")
1540 @lazyproperty
1541 def E_f(self):
1542 """The Fermi energy for the chunk"""
1543 return self.parse_scalar("fermi_energy")
1545 @lazyproperty
1546 def converged(self):
1547 """True if the chunk is a fully converged final structure"""
1548 return (len(self.lines) > 0) and ("Have a nice day." in self.lines[-5:])
1550 @lazyproperty
1551 def hirshfeld_charges(self):
1552 """The Hirshfeld charges for the chunk"""
1553 return self._parse_hirshfeld()["charges"]
1555 @lazyproperty
1556 def hirshfeld_atomic_dipoles(self):
1557 """The Hirshfeld atomic dipole moments for the chunk"""
1558 return self._parse_hirshfeld()["atomic_dipoles"]
1560 @lazyproperty
1561 def hirshfeld_volumes(self):
1562 """The Hirshfeld volume for the chunk"""
1563 return self._parse_hirshfeld()["volumes"]
1565 @lazyproperty
1566 def hirshfeld_dipole(self):
1567 """The Hirshfeld systematic dipole moment for the chunk"""
1568 atoms = self._parse_atoms()
1570 if not np.any(atoms.pbc):
1571 return self._parse_hirshfeld()["dipole"]
1573 return None
1575 @lazyproperty
1576 def eigenvalues(self):
1577 """All outputted eigenvalues for the system"""
1578 return self._parse_eigenvalues()["eigenvalues"]
1580 @lazyproperty
1581 def occupancies(self):
1582 """All outputted occupancies for the system"""
1583 return self._parse_eigenvalues()["occupancies"]
1586def get_header_chunk(fd):
1587 """Returns the header information from the aims.out file"""
1588 header = []
1589 line = ""
1591 # Stop the header once the first SCF cycle begins
1592 while (
1593 "Convergence: q app. | density | eigen (eV) | Etot (eV)"
1594 not in line
1595 and "Begin self-consistency iteration #" not in line
1596 ):
1597 try:
1598 line = next(fd).strip() # Raises StopIteration on empty file
1599 except StopIteration:
1600 raise ParseError(
1601 "No SCF steps present, calculation failed at setup."
1602 )
1604 header.append(line)
1605 return AimsOutHeaderChunk(header)
1608def get_aims_out_chunks(fd, header_chunk):
1609 """Yield unprocessed chunks (header, lines) for each AimsOutChunk image."""
1610 try:
1611 line = next(fd).strip() # Raises StopIteration on empty file
1612 except StopIteration:
1613 return
1615 # If the calculation is relaxation the updated structural information
1616 # occurs before the re-initialization
1617 if header_chunk.is_relaxation:
1618 chunk_end_line = (
1619 "Geometry optimization: Attempting to predict improved coordinates."
1620 )
1621 else:
1622 chunk_end_line = "Begin self-consistency loop: Re-initialization"
1624 # If SCF is not converged then do not treat the next chunk_end_line as a
1625 # new chunk until after the SCF is re-initialized
1626 ignore_chunk_end_line = False
1627 while True:
1628 try:
1629 line = next(fd).strip() # Raises StopIteration on empty file
1630 except StopIteration:
1631 break
1633 lines = []
1634 while chunk_end_line not in line or ignore_chunk_end_line:
1635 lines.append(line)
1636 # If SCF cycle not converged or numerical stresses are requested,
1637 # don't end chunk on next Re-initialization
1638 patterns = [
1639 (
1640 "Self-consistency cycle not yet converged -"
1641 " restarting mixer to attempt better convergence."
1642 ),
1643 (
1644 "Components of the stress tensor (for mathematical "
1645 "background see comments in numerical_stress.f90)."
1646 ),
1647 "Calculation of numerical stress completed",
1648 ]
1649 if any(pattern in line for pattern in patterns):
1650 ignore_chunk_end_line = True
1651 elif "Begin self-consistency loop: Re-initialization" in line:
1652 ignore_chunk_end_line = False
1654 try:
1655 line = next(fd).strip()
1656 except StopIteration:
1657 break
1659 yield AimsOutCalcChunk(lines, header_chunk)
1662def check_convergence(chunks, non_convergence_ok=False):
1663 """Check if the aims output file is for a converged calculation
1665 Parameters
1666 ----------
1667 chunks: list of AimsOutChunks
1668 The list of chunks for the aims calculations
1669 non_convergence_ok: bool
1670 True if it is okay for the calculation to not be converged
1672 Returns
1673 -------
1674 bool
1675 True if the calculation is converged
1676 """
1677 if not non_convergence_ok and not chunks[-1].converged:
1678 raise ParseError("The calculation did not complete successfully")
1679 return True
1682@reader
1683def read_aims_output(fd, index=-1, non_convergence_ok=False):
1684 """Import FHI-aims output files with all data available, i.e.
1685 relaxations, MD information, force information, etc."""
1686 header_chunk = get_header_chunk(fd)
1687 chunks = list(get_aims_out_chunks(fd, header_chunk))
1688 check_convergence(chunks, non_convergence_ok)
1690 # Relaxations have an additional footer chunk due to how it is split
1691 if header_chunk.is_relaxation:
1692 images = [chunk.atoms for chunk in chunks[:-1]]
1693 else:
1694 images = [chunk.atoms for chunk in chunks]
1695 return images[index]
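# Illustrative usage (a sketch; the file name is hypothetical):
#
#     final = read_aims_output("aims.out")                          # last image
#     trajectory = read_aims_output("aims.out", index=slice(None))  # all images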
1698@reader
1699def read_aims_results(fd, index=-1, non_convergence_ok=False):
1700 """Import FHI-aims output files and summarize all relevant information
1701 into a dictionary"""
1702 header_chunk = get_header_chunk(fd)
1703 chunks = list(get_aims_out_chunks(fd, header_chunk))
1704 check_convergence(chunks, non_convergence_ok)
1706 # Relaxations have an additional footer chunk due to how it is split
1707 if header_chunk.is_relaxation and (index == -1):
1708 return chunks[-2].results
1710 return chunks[index].results
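# Illustrative usage (a sketch; the file name is hypothetical):
#
#     results = read_aims_results("aims.out")
#     energy = results.get("energy")
#     forces = results.get("forces")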