Coverage for local_installation/dynasor/trajectory/extxyz_trajectory_reader.py: 89%
74 statements
coverage.py v7.3.2, created at 2023-11-30 21:04 +0000
import concurrent.futures
from ase.io.extxyz import ixyzchunks
from dynasor.trajectory.abstract_trajectory_reader import AbstractTrajectoryReader
from dynasor.trajectory.trajectory_frame import ReaderFrame
from itertools import count
import numpy as np


def chunk_to_atoms(chunk):
    atoms = chunk.build()
    return atoms
def iread(f, max_workers=None):
    """Reads extxyz files in parallel using multiple processes."""

    # chunks are simple objects that can be sent to the worker processes
    chunk_iterator = iter(ixyzchunks(f))

    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as ex:

        # prime the buffer with one chunk per worker process
        buff = []
        for i in range(ex._max_workers):
            try:
                chunk = next(chunk_iterator)
                buff.append(ex.submit(chunk_to_atoms, chunk))
            except RuntimeError:
                pass
            except StopIteration:
                pass

        # pop finished frames from the front of the buffer in order,
        # submitting a new chunk for each one that is consumed
        while True:
            if len(buff) == 0:
                break

            res = buff.pop(0)

            try:
                chunk = next(chunk_iterator)
                buff.append(ex.submit(chunk_to_atoms, chunk))
            except RuntimeError:
                pass
            except StopIteration:
                pass

            atoms = res.result()
            yield atoms
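A minimal sketch of how this generator could be driven on its own; the file name dump.xyz is a placeholder, not part of dynasor:

    with open('dump.xyz') as f:
        for atoms in iread(f, max_workers=2):
            # each item is an ase.Atoms object built from one xyz chunk
            print(len(atoms), atoms.get_chemical_formula())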
class ExtxyzTrajectoryReader(AbstractTrajectoryReader):
    """Reads extended xyz trajectory files, such as those produced by GPUMD.

    This is a naive (and comparatively slow) parallel implementation which
    relies on the ASE xyz reader.

    Parameters
    ----------
    filename
        Name of input file.
    length_unit
        Length unit used in the trajectory; determines the conversion factor
        to the internal dynasor length unit. If ``None``, positions and cell
        are passed on unchanged.
    time_unit
        Time unit used in the trajectory; determines the conversion factor
        to the internal dynasor time unit. If ``None``, velocities are passed
        on unchanged.
    max_workers
        Number of worker processes; defaults to ``None``, which means that
        the number of processors on the machine is used.
    """
    def __init__(self,
                 filename: str,
                 length_unit: str = None,
                 time_unit: str = None,
                 max_workers: int = None):

        # set up generator object
        self._fobj = open(filename, 'r')
        self._generator_xyz = iread(self._fobj, max_workers=max_workers)
        self._open = True
        self._frame_index = count(0)

        # set up units
        if length_unit is None:
            self.x_factor = 1.0
        else:
            self.x_factor = self.lengthunits_to_nm_table[length_unit]
        if time_unit is None:
            self.t_factor = 1.0
        else:
            self.t_factor = self.timeunits_to_fs_table[time_unit]
        self.v_factor = self.x_factor / self.t_factor

    def _get_next(self):
        try:
            atoms = next(self._generator_xyz)
        except Exception:
            self._fobj.close()
            self._open = False
            raise StopIteration

        self._atom_types = np.array(list(atoms.symbols))
        self._n_atoms = len(atoms)
        self._cell = atoms.cell[:]
        self._x = atoms.positions
        if 'vel' in atoms.arrays:
            self._v = atoms.arrays['vel']
        else:
            self._v = None

    def __iter__(self):
        return self

    def close(self):
        if not self._fobj.closed:
            self._fobj.close()
            self._open = False
    def __next__(self):
        if not self._open:
            raise StopIteration

        self._get_next()

        if self._v is not None:
            frame = ReaderFrame(frame_index=next(self._frame_index),
                                n_atoms=int(self._n_atoms),
                                cell=self.x_factor * self._cell.copy('F'),
                                positions=self.x_factor * self._x,
                                velocities=self.v_factor * self._v,
                                atom_types=self._atom_types
                                )
        else:
            frame = ReaderFrame(frame_index=next(self._frame_index),
                                n_atoms=int(self._n_atoms),
                                cell=self.x_factor * self._cell.copy('F'),
                                positions=self.x_factor * self._x,
                                atom_types=self._atom_types
                                )
        return frame
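A minimal usage sketch, assuming a trajectory file traj.xyz (a placeholder name) and that ReaderFrame exposes the fields passed above as attributes; length_unit and time_unit are left at their defaults, so no unit conversion is applied:

    reader = ExtxyzTrajectoryReader('traj.xyz', max_workers=2)
    for frame in reader:
        print(frame.frame_index, frame.n_atoms, frame.positions.shape)
    reader.close()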