ESPResSo
Extensible Simulation Package for Research on Soft Matter Systems
Loading...
Searching...
No Matches
h5md_core.cpp
Go to the documentation of this file.
1/*
2 * Copyright (C) 2010-2026 The ESPResSo project
3 * Copyright (C) 2002,2003,2004,2005,2006,2007,2008,2009,2010
4 * Max-Planck-Institute for Polymer Research, Theory Group
5 *
6 * This file is part of ESPResSo.
7 *
8 * ESPResSo is free software: you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation, either version 3 of the License, or
11 * (at your option) any later version.
12 *
13 * ESPResSo is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
17 *
18 * You should have received a copy of the GNU General Public License
19 * along with this program. If not, see <http://www.gnu.org/licenses/>.
20 */
21
22#include "hdf5_patches.hpp" // must appear first
23
24#include "h5md_core.hpp"
25#include "h5md_dataset.hpp"
27
28#include "BoxGeometry.hpp"
29#include "Particle.hpp"
31
32#include <config/version.hpp>
33
34#include <utils/Vector.hpp>
35
36#include <boost/array.hpp>
37#include <boost/mpi/collectives.hpp>
38#include <boost/multi_array.hpp>
39
40#if defined(__GNUC__) or defined(__GNUG__)
41// ignore false positive: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=119388
42#pragma GCC diagnostic push
43#pragma GCC diagnostic ignored "-Wuninitialized"
44#endif
45#if __has_include(<highfive/boost.hpp>)
46#include <highfive/boost.hpp>
47#endif
48#include <highfive/highfive.hpp>
49#if defined(__GNUC__) or defined(__GNUG__)
50#pragma GCC diagnostic pop
51#endif
52
53#include <mpi.h>
54
55#include <algorithm>
56#include <cstddef>
57#include <filesystem>
58#include <fstream>
59#include <functional>
60#include <iterator>
61#include <memory>
62#include <ranges>
63#include <stdexcept>
64#include <string>
65#include <utility>
66#include <vector>
67
68namespace Writer {
69namespace H5md {
70
71using MultiArray3i = boost::multi_array<int, 3>;
78
/** Mapping from user-facing field selector strings to H5MD output bit flags.
 *  The special selector "all" enables every output field at once. Keys are
 *  the names accepted by the Python interface; see also valid_fields(). */
static std::unordered_map<std::string, H5MDOutputFields> const fields_map = {
    {"all", H5MD_OUT_ALL},
    {"particle.type", H5MD_OUT_TYPE},
    {"particle.position", H5MD_OUT_POS},
    {"particle.image", H5MD_OUT_IMG},
    {"particle.velocity", H5MD_OUT_VEL},
    {"particle.force", H5MD_OUT_FORCE},
    {"particle.bonds", H5MD_OUT_BONDS},
    {"particle.charge", H5MD_OUT_CHARGE},
    {"particle.mass", H5MD_OUT_MASS},
    {"box.length", H5MD_OUT_BOX_L},
    {"lees_edwards.offset", H5MD_OUT_LE_OFF},
    {"lees_edwards.direction", H5MD_OUT_LE_DIR},
    {"lees_edwards.normal", H5MD_OUT_LE_NORMAL},
};
94
95static auto fields_list_to_bitfield(std::vector<std::string> const &fields) {
96 unsigned int bitfield = H5MD_OUT_NONE;
97 for (auto const &field_name : fields) {
98 if (not fields_map.contains(field_name)) {
99 throw std::invalid_argument("Unknown field '" + field_name + "'");
100 }
102 }
103 return bitfield;
104}
105
106static void backup_file(std::filesystem::path const &from,
107 std::filesystem::path const &to) {
108 /*
109 * If the file itself *and* a backup file exists, something must
110 * have gone wrong.
111 */
112 auto constexpr option_fail_if_exists = std::filesystem::copy_options::none;
113 try {
114 std::filesystem::copy_file(from, to, option_fail_if_exists);
115 } catch (std::filesystem::filesystem_error const &) {
116 throw left_backupfile();
117 }
118}
119
120template <typename extent_type>
121static void extend_dataset(HighFive::DataSet &dataset,
122 extent_type const &change_extent) {
123 auto extents = dataset.getSpace().getDimensions();
124 auto const rank = extents.size();
125 /* Extend the dataset for another timestep */
126 for (auto i = 0u; i < rank; i++) {
127 extents[i] += change_extent[i];
128 }
129 dataset.resize(extents);
130}
131
132template <typename value_type, typename extent_type>
133static void write_dataset(value_type const &data, HighFive::DataSet &dataset,
134 extent_type const &offset, extent_type const &count) {
135 auto xfer_props = HighFive::DataTransferProps{};
136 xfer_props.add(HighFive::UseCollectiveIO{});
137 /* write the data to the dataset. */
138 dataset.select(offset, count).write(data, xfer_props);
139}
140
141template <typename value_type, typename extent_type>
142static void write_dataset(value_type const &data, HighFive::DataSet &dataset,
144 extent_type const &offset, extent_type const &count) {
146 /* write the data to the dataset. */
147 write_dataset(data, dataset, offset, count);
148}
149
150static void write_script(HighFive::File &h5md_file,
151 std::filesystem::path const &script_path) {
152 if (!script_path.empty()) {
153 std::ifstream scriptfile(script_path);
154 std::string buffer(std::istreambuf_iterator<char>(scriptfile),
155 std::istreambuf_iterator<char>{});
156 h5md_file.createGroup("/parameters");
157 auto group = h5md_file.createGroup("/parameters/files");
158 group.createAttribute("script", buffer);
159 }
160}
161
162/* Initialize the file-related variables after parameters have been set. */
163void File::init_file() {
164 auto const file_exists = std::filesystem::exists(m_file_path);
165 auto const backup_file_exists = std::filesystem::exists(m_backup_path);
166 /* Perform a barrier synchronization. Otherwise one process might already
167 * create the file while another still checks for its existence. */
168 m_comm.barrier();
169 if (file_exists) {
170 if (m_h5md_specification.is_compliant(m_file_path)) {
171 /*
172 * If the file exists and has a valid H5MD structure, let's create a
173 * backup of it. This has the advantage, that the new file can
174 * just be deleted if the simulation crashes at some point and we
175 * still have a valid trajectory backed up, from which we can restart.
176 */
177 if (m_comm.rank() == 0)
178 backup_file(m_file_path, m_backup_path);
179 load_file();
180 } else {
181 throw incompatible_h5mdfile();
182 }
183 } else {
185 throw left_backupfile();
186 create_file();
187 }
188}
189
190void File::load_datasets() {
191 auto &datasets = m_datasets;
192 for (auto const &ds : m_h5md_specification.get_datasets()) {
193 if (ds.is_link)
194 continue;
195 auto path = ds.path();
196 datasets[path] = m_h5md_file->getDataSet(path);
197 }
198}
199
200void File::create_groups() {
201 for (auto const &ds : m_h5md_specification.get_datasets()) {
202 std::stringstream ss(ds.group);
203 std::string segment;
204 std::string current_path = "/";
205 while (std::getline(ss, segment, '/')) {
206 if (segment.empty())
207 continue;
208 current_path += "/" + segment;
209 if (!m_h5md_file->exist(current_path)) {
210 m_h5md_file->createGroup(current_path);
211 }
212 }
213 }
214}
215
216static std::vector<std::size_t> create_dims(hsize_t rank, hsize_t data_dim) {
217 if (rank == 3ul) {
218 return {0ul, 0ul, data_dim};
219 }
220 if (rank == 2ul) {
221 return {0ul, data_dim};
222 }
223 assert(rank == 1ul);
224 return {data_dim};
225}
226
227static std::vector<std::size_t> create_maxdims(hsize_t rank, hsize_t data_dim,
229 if (rank == 3ul) {
230 return {max_dim, max_dim, data_dim};
231 }
232 if (rank == 2ul) {
233 return {max_dim, max_dim};
234 }
235 assert(rank == 1ul);
236 return {max_dim};
237}
238
239static std::vector<hsize_t> create_chunk_dims(hsize_t rank, hsize_t data_dim,
240 hsize_t size) {
241 auto const chunk_size = (rank > 1ul) ? size : hsize_t{1ul};
242 if (rank == 3ul) {
243 return {1ul, chunk_size, data_dim};
244 }
245 if (rank == 2ul) {
246 return {1ul, chunk_size};
247 }
248 assert(rank == 1ul);
249 return {chunk_size};
250}
251
252void File::create_datasets() {
253 auto &datasets = m_datasets;
254 for (auto const &ds : m_h5md_specification.get_datasets()) {
255 if (ds.is_link)
256 continue;
257 auto dims = create_dims(ds.rank, ds.data_dim);
258 auto maxdims = create_maxdims(ds.rank, ds.data_dim, H5S_UNLIMITED);
259 auto dataspace = HighFive::DataSpace(dims, maxdims);
260 auto const chunk_size = static_cast<hsize_t>(m_chunk_size);
261 auto const chunk = create_chunk_dims(ds.rank, ds.data_dim, chunk_size);
262 HighFive::DataSetCreateProps props;
263 props.add(HighFive::Chunking(chunk));
264 auto path = ds.path();
265 if (ds.type == H5T_NATIVE_INT) {
266 datasets.emplace(path,
267 m_h5md_file->createDataSet<int>(path, dataspace, props));
268 } else if (ds.type == H5T_NATIVE_DOUBLE) {
269 datasets.emplace(
270 path, m_h5md_file->createDataSet<double>(path, dataspace, props));
271 }
272 }
273}
274
275void File::load_file() {
276 HighFive::FileAccessProps fapl;
277 fapl.add(HighFive::MPIOFileAccess{m_comm, MPI_INFO_NULL});
278 fapl.add(HighFive::MPIOCollectiveMetadata{});
279 m_h5md_file = std::make_unique<HighFive::File>(
280 m_file_path.string(), HighFive::File::ReadWrite, fapl);
281 load_datasets();
282}
283
284static void write_attributes(HighFive::File &h5md_file) {
285 auto h5md_group = h5md_file.createGroup("h5md");
286 auto att = h5md_group.createAttribute<std::size_t>(
287 "version",
288 HighFive::DataSpace::From(std::array<std::size_t, 2>{{1ul, 1ul}}));
289 att.write(std::array<std::size_t, 2>{{1ul, 1ul}});
290 auto h5md_creator_group = h5md_group.createGroup("creator");
291 h5md_creator_group.createAttribute("name", "ESPResSo");
292 h5md_creator_group.createAttribute("version", ESPRESSO_VERSION);
293 auto h5md_author_group = h5md_group.createGroup("author");
294 h5md_author_group.createAttribute("name", "N/A");
295 auto box_path = "/particles/atoms/box";
296 if (h5md_file.exist(box_path)) {
297 auto group = h5md_file.getGroup(box_path);
298 group.createAttribute("dimension", 3);
299 group.createAttribute("boundary", "periodic");
300 }
301}
302
303void File::write_units() {
304 auto &datasets = m_datasets;
305 if (!mass_unit().empty() and (m_fields & H5MD_OUT_MASS)) {
306 datasets.at("/particles/atoms/mass/value")
307 .createAttribute("unit", mass_unit());
308 }
309 if (!charge_unit().empty() and (m_fields & H5MD_OUT_CHARGE)) {
310 datasets.at("/particles/atoms/charge/value")
311 .createAttribute("unit", charge_unit());
312 }
313 if (!length_unit().empty() and (m_fields & H5MD_OUT_BOX_L)) {
314 datasets.at("/particles/atoms/position/value")
315 .createAttribute("unit", length_unit());
316 datasets.at("/particles/atoms/box/edges/value")
317 .createAttribute("unit", length_unit());
318 }
319 if (!length_unit().empty() and (m_fields & H5MD_OUT_LE_OFF)) {
320 datasets.at("/particles/atoms/lees_edwards/offset/value")
321 .createAttribute("unit", length_unit());
322 }
323 if (!velocity_unit().empty() and (m_fields & H5MD_OUT_VEL)) {
324 datasets.at("/particles/atoms/velocity/value")
325 .createAttribute("unit", velocity_unit());
326 }
327 if (!force_unit().empty() and (m_fields & H5MD_OUT_FORCE)) {
328 datasets.at("/particles/atoms/force/value")
329 .createAttribute("unit", force_unit());
330 }
331 if (!time_unit().empty()) {
332 datasets.at("/particles/atoms/id/time")
333 .createAttribute("unit", time_unit());
334 }
335}
336
337void File::create_hard_links() {
338 std::string path_step = "/particles/atoms/id/step";
339 std::string path_time = "/particles/atoms/id/time";
340 for (auto &ds : m_h5md_specification.get_datasets()) {
341 if (ds.is_link) {
342 char const *from = nullptr;
343 if (ds.name == "step") {
344 from = path_step.c_str();
345 } else if (ds.name == "time") {
346 from = path_time.c_str();
347 }
348 assert(from != nullptr);
349 if (H5Lcreate_hard(m_h5md_file->getId(), from, m_h5md_file->getId(),
350 ds.path().c_str(), H5P_DEFAULT, H5P_DEFAULT) < 0) {
351 throw std::runtime_error("Error creating hard link for " + ds.path());
352 }
353 }
354 }
355}
356
/** Create a brand-new H5MD file and populate its skeleton.
 *  The call order below is significant: the script is stored first, groups
 *  must exist before datasets are created inside them, datasets before unit
 *  attributes are attached, and hard links are created last since they
 *  reference the already-created time/step datasets. */
void File::create_file() {
  // open collectively with MPI-IO
  HighFive::FileAccessProps fapl;
  fapl.add(HighFive::MPIOFileAccess{m_comm, MPI_INFO_NULL});
  fapl.add(HighFive::MPIOCollectiveMetadata{});
  m_h5md_file = std::make_unique<HighFive::File>(m_file_path.string(),
                                                 HighFive::File::Create, fapl);
  write_script(*m_h5md_file, m_absolute_script_path);
  create_groups();
  create_datasets();
  write_attributes(*m_h5md_file);
  write_units();
  create_hard_links();
}
370
372 if (m_comm.rank() == 0) {
373 std::filesystem::remove(m_backup_path);
374 }
375}
376
377namespace detail {
378
/** Rank-dependent hyperslab helpers (extent/count/offset/reshape);
 *  only the rank-2 and rank-3 specializations are defined. */
template <std::size_t rank> struct slice_info {};
380
381template <> struct slice_info<3> {
382 static auto extent(hsize_t n_part_diff) {
383 return Vector3s{1ul, n_part_diff, 0ul};
384 }
385 static constexpr auto count(std::size_t local_n_part) {
386 return Vector3s{1ul, local_n_part, 3ul};
387 }
388 static auto offset(hsize_t n_time_steps, hsize_t prefix) {
389 return Vector3s{n_time_steps, prefix, 0ul};
390 }
391 template <typename T>
392 static boost::multi_array<T, 3> reshape(std::vector<T> const &v1d,
393 Vector3s const &count) {
394 if (v1d.empty()) {
395 boost::multi_array<T, 3> data(boost::extents[0][0][0]);
396 return data;
397 }
398 auto const rows = count[1];
399 auto const cols = count[2];
400
401 boost::multi_array<T, 3> data(
402 boost::extents[1][static_cast<long>(rows)][static_cast<long>(cols)]);
403
404 for (std::size_t i = 0; i < rows; ++i) {
405 for (std::size_t j = 0; j < cols; ++j) {
406 data[0][i][j] = v1d[cols * i + j];
407 }
408 }
409
410 return data;
411 }
412};
413
414template <> struct slice_info<2> {
415 static auto extent(hsize_t n_part_diff) { return Vector2s{1ul, n_part_diff}; }
416 static constexpr auto count(std::size_t local_n) {
417 return Vector2s{1ul, local_n};
418 }
419 static auto offset(hsize_t n_time_steps, hsize_t prefix) {
421 }
422 template <typename T>
423 static boost::multi_array<T, 2> reshape(std::vector<T> const &v1d,
424 Vector2s const &count) {
425 if (v1d.empty()) {
426 boost::multi_array<T, 2> data(boost::extents[0][0]);
427 return data;
428 }
429 auto const cols = count[1];
430
431 boost::multi_array<T, 2> data(boost::extents[1][static_cast<long>(cols)]);
432
433 for (std::size_t i = 0; i < cols; ++i) {
434 data[0][i] = v1d[i];
435 }
436
437 return data;
438 }
439};
440
/** Traits mapping a serialized property type to its scalar element type
 *  and the number of scalars it contributes per particle. */
template <typename T> struct get_buffer_traits {};

/** Arithmetic scalars serialize as themselves, one element per particle. */
template <typename T>
  requires std::is_arithmetic_v<T>
struct get_buffer_traits<T> {
  using type = T;
  constexpr static std::size_t dim = 1ul;
};

/** Fixed-size vectors serialize as N scalars of the element type. */
template <typename T, std::size_t N>
  requires std::is_arithmetic_v<T>
struct get_buffer_traits<Utils::Vector<T, N>> {
  using type = T;
  constexpr static std::size_t dim = N;
};
456
457template <typename Functor> class ParticleDataSerializer {
458 using RetVal = std::decay_t<std::invoke_result_t<Functor, Particle const &>>;
459 Functor m_getter;
460
461 template <typename T>
462 requires std::is_arithmetic_v<T>
463 void serialize(auto &buffer, T const &value) const {
464 buffer.emplace_back(value);
465 }
466
467 template <typename T, std::size_t N>
468 void serialize(auto &buffer, Utils::Vector<T, N> const &value) const {
469 buffer.insert(buffer.end(), value.cbegin(), value.cend());
470 }
471
472public:
473 explicit ParticleDataSerializer(Functor lambda) : m_getter{lambda} {}
474
475 auto operator()(ParticleRange const &particles) const {
476 auto constexpr value_dim = get_buffer_traits<RetVal>::dim;
477 std::vector<typename get_buffer_traits<RetVal>::type> buffer{};
478 buffer.reserve(particles.size() * value_dim);
479 for (auto const &p : particles) {
480 serialize(buffer, m_getter(p));
481 }
482 return buffer;
483 }
484};
485
486template <typename Functor> auto make_serializer(Functor lambda) {
487 return ParticleDataSerializer<Functor>{lambda};
488}
489template <typename RetVal>
490auto make_serializer(RetVal (Particle::*getter)() const) {
491 auto kernel = [getter](Particle const &p) -> RetVal { return (p.*getter)(); };
492 return ParticleDataSerializer<decltype(kernel)>{std::move(kernel)};
493}
494
495} // namespace detail
496
497template <std::size_t dim, typename Serializer>
499 ParticleRange const &particles,
500 HighFive::DataSet &dataset,
502 auto const n_part_local = static_cast<hsize_t>(particles.size());
503 auto const old_extents = dataset.getSpace().getDimensions();
504 auto const extent_n_part =
505 std::max(n_part_global, static_cast<hsize_t>(old_extents[1])) -
506 old_extents[1];
507 extend_dataset(dataset, detail::slice_info<dim>::extent(extent_n_part));
508 auto const count = detail::slice_info<dim>::count(n_part_local);
509 auto const offset = detail::slice_info<dim>::offset(old_extents[0], prefix);
510 HighFive::DataType dtype = dataset.getDataType();
511 auto buffer = serializer(particles);
512 write_dataset(detail::slice_info<dim>::reshape(buffer, count), dataset,
513 offset, count);
514}
515
516static void write_box(BoxGeometry const &box_geo, HighFive::DataSet &dataset) {
517 auto const extents = dataset.getSpace().getDimensions();
519 Vector2s const offset{extents[0], 0ul};
520 Vector2s const count{1ul, 3ul};
521 auto const data = box_geo.length().as_vector();
522 write_dataset(detail::slice_info<2>::reshape(data, count), dataset, offset,
523 count);
524}
525
526static void write_le_off(LeesEdwardsBC const &lebc,
527 HighFive::DataSet &dataset) {
528 auto const extents = dataset.getSpace().getDimensions();
530 Vector2s const offset{extents[0], 0ul};
531 Vector2s const count{1ul, 1ul};
532 auto const data = std::vector<double>{lebc.pos_offset};
533 write_dataset(detail::slice_info<2>::reshape(data, count), dataset, offset,
534 count);
535}
536
537static void write_le_dir(LeesEdwardsBC const &lebc,
538 HighFive::DataSet &dataset) {
539 auto const shear_direction = static_cast<int>(lebc.shear_direction);
540 auto const extents = dataset.getSpace().getDimensions();
542 Vector2s const offset{extents[0], 0ul};
543 Vector2s const count{1ul, 1ul};
544 auto const data = std::vector<int>{shear_direction};
545 write_dataset(detail::slice_info<2>::reshape(data, count), dataset, offset,
546 count);
547}
548
550 HighFive::DataSet &dataset) {
551 auto const shear_plane_normal = static_cast<int>(lebc.shear_plane_normal);
552 auto const extents = dataset.getSpace().getDimensions();
554 Vector2s const offset{extents[0], 0ul};
555 Vector2s const count{1ul, 1ul};
556 auto const data = std::vector<int>{shear_plane_normal};
557 write_dataset(detail::slice_info<2>::reshape(data, count), dataset, offset,
558 count);
559}
560
561void File::write(const ParticleRange &particles, double time, int step,
562 BoxGeometry const &box_geo) {
563 auto &datasets = m_datasets;
564 if (m_fields & H5MD_OUT_BOX_L) {
565 write_box(box_geo, datasets.at("/particles/atoms/box/edges/value"));
566 }
567 auto const &lebc = box_geo.lees_edwards_bc();
568 if (m_fields & H5MD_OUT_LE_OFF) {
570 datasets.at("/particles/atoms/lees_edwards/offset/value"));
571 }
572 if (m_fields & H5MD_OUT_LE_DIR) {
574 datasets.at("/particles/atoms/lees_edwards/direction/value"));
575 }
576 if (m_fields & H5MD_OUT_LE_NORMAL) {
578 datasets.at("/particles/atoms/lees_edwards/normal/value"));
579 }
580
581 // calculate particle count and offset
582 static_assert(sizeof(hsize_t) == 8ul);
583 auto const n_part_local = static_cast<hsize_t>(particles.size());
584 hsize_t prefix{0ul};
587 auto const n_part_global =
588 boost::mpi::all_reduce(m_comm, n_part_local, std::plus<hsize_t>());
589
591 datasets.at("/particles/atoms/id/value"),
592 detail::make_serializer(&Particle::id));
593
594 {
595 HighFive::DataSet &dataset = datasets.at("/particles/atoms/id/value");
596 auto const extents = dataset.getSpace().getDimensions();
597 write_dataset(std::vector<double>{time},
598 datasets.at("/particles/atoms/id/time"), Vector1s{1},
599 Vector1s{extents[0]}, Vector1s{1});
600 write_dataset(std::vector<int>{step},
601 datasets.at("/particles/atoms/id/step"), Vector1s{1},
602 Vector1s{extents[0]}, Vector1s{1});
603 }
604
605 if (m_fields & H5MD_OUT_TYPE) {
607 datasets.at("/particles/atoms/species/value"),
608 detail::make_serializer(&Particle::type));
609 }
610 if (m_fields & H5MD_OUT_MASS) {
612 datasets.at("/particles/atoms/mass/value"),
613 detail::make_serializer(&Particle::mass));
614 }
615 if (m_fields & H5MD_OUT_POS) {
617 prefix, n_part_global, particles,
618 datasets.at("/particles/atoms/position/value"),
619 detail::make_serializer([&](Particle const &p) {
620 return box_geo.folded_position(p.pos());
621 }));
622 }
623 if (m_fields & H5MD_OUT_IMG) {
625 prefix, n_part_global, particles,
626 datasets.at("/particles/atoms/image/value"),
627 detail::make_serializer([&](Particle const &p) {
628 return box_geo.folded_image_box(p.pos(), p.image_box());
629 }));
630 }
631 if (m_fields & H5MD_OUT_VEL) {
633 prefix, n_part_global, particles,
634 datasets.at("/particles/atoms/velocity/value"),
635 detail::make_serializer(&Particle::v));
636 }
637 if (m_fields & H5MD_OUT_FORCE) {
639 datasets.at("/particles/atoms/force/value"),
640 detail::make_serializer(&Particle::force));
641 }
642 if (m_fields & H5MD_OUT_CHARGE) {
644 datasets.at("/particles/atoms/charge/value"),
645 detail::make_serializer(&Particle::q));
646 }
647 if (m_fields & H5MD_OUT_BONDS) {
648 write_connectivity(particles);
649 }
650}
651
652void File::write_connectivity(const ParticleRange &particles) {
653 MultiArray3i bond(boost::extents[0][0][0]);
654 for (auto const &p : particles) {
655 auto nbonds_local = static_cast<decltype(bond)::index>(bond.shape()[1]);
656 for (auto const b : p.bonds()) {
657 auto const &partner_ids = b.partner_ids();
658 if (partner_ids.size() == 1u) {
659 bond.resize(boost::extents[1][nbonds_local + 1][2]);
660 bond[0][nbonds_local][0] = p.id();
661 bond[0][nbonds_local][1] = partner_ids[0];
662 nbonds_local++;
663 }
664 }
665 }
666
667 auto const n_bonds_local = static_cast<int>(bond.shape()[1]);
668 auto &datasets = m_datasets;
669 int prefix_bonds = 0;
672 auto const n_bonds_total =
673 boost::mpi::all_reduce(m_comm, n_bonds_local, std::plus<int>());
674 auto const extents =
675 datasets.at("/connectivity/atoms/value").getSpace().getDimensions();
676 Vector3s offset_bonds = {extents[0], static_cast<std::size_t>(prefix_bonds),
677 0};
678 Vector3s count_bonds = {1, static_cast<std::size_t>(n_bonds_local), 2};
679 auto const n_bond_diff = std::max(static_cast<hsize_t>(n_bonds_total),
680 static_cast<hsize_t>(extents[1])) -
681 extents[1];
682 Vector3s change_extent_bonds = {1, static_cast<std::size_t>(n_bond_diff), 0};
683 write_dataset(bond, datasets.at("/connectivity/atoms/value"),
685}
686
/** Flush pending HDF5 buffers of the underlying file to disk. */
void File::flush() { m_h5md_file->flush(); }
688
689std::vector<std::string> File::valid_fields() const {
690 auto const view = std::views::elements<0>(fields_map);
691 return {view.begin(), view.end()};
692}
693
/**
 * @brief Construct an H5MD file handle and open (or create) the file.
 * @throws std::domain_error if @p chunk_size is not positive
 * @throws std::invalid_argument for unknown names in @p output_fields
 */
File::File(std::filesystem::path file_path, std::filesystem::path script_path,
           std::vector<std::string> const &output_fields, std::string mass_unit,
           std::string length_unit, std::string time_unit,
           std::string force_unit, std::string velocity_unit,
           std::string charge_unit, int chunk_size)
    // Initializer order is significant: m_backup_path copies m_file_path
    // after it was moved into place, m_absolute_script_path derives from the
    // already-moved m_script_path, and m_h5md_specification needs m_fields.
    : m_file_path(std::move(file_path)), m_backup_path(m_file_path),
      m_script_path(std::move(script_path)),
      m_absolute_script_path(
          m_script_path.empty()
              ? std::filesystem::path()
              : std::filesystem::weakly_canonical(m_script_path)),
      m_mass_unit(std::move(mass_unit)), m_length_unit(std::move(length_unit)),
      m_time_unit(std::move(time_unit)), m_force_unit(std::move(force_unit)),
      m_velocity_unit(std::move(velocity_unit)),
      m_charge_unit(std::move(charge_unit)), m_chunk_size(chunk_size),
      m_comm(boost::mpi::communicator()),
      m_fields(fields_list_to_bitfield(output_fields)), m_datasets(),
      m_h5md_specification(m_fields) {
  // validate parameters before touching the filesystem
  if (chunk_size <= 0) {
    throw std::domain_error("Parameter 'chunk_size' must be > 0");
  }
  m_backup_path += ".bak";
  init_file();
}
718
720 m_datasets.clear();
721 m_h5md_file.reset();
722}
723
724} /* namespace H5md */
725} /* namespace Writer */
Vector implementation and trait types for boost qvm interoperability.
Utils::Vector3d const & length() const
Box length.
LeesEdwardsBC const & lees_edwards_bc() const
A range of particles.
base_type::size_type size() const
DEVICE_QUALIFIER constexpr const_iterator cbegin() const noexcept
Definition Array.hpp:148
std::vector< T > as_vector() const
Definition Vector.hpp:139
DEVICE_QUALIFIER constexpr const_iterator cend() const noexcept
Definition Array.hpp:160
void write(const ParticleRange &particles, double time, int step, BoxGeometry const &geometry)
Write data to the hdf5 file.
auto const & chunk_size() const
Retrieve the set chunk size.
auto const & length_unit() const
Retrieve the set length unit.
auto const & time_unit() const
Retrieve the set time unit.
void close()
Method to perform the renaming of the temporary file from "filename" + ".bak" to "filename".
auto const & force_unit() const
Retrieve the set force unit.
auto const & mass_unit() const
Retrieve the set mass unit.
auto const & charge_unit() const
Retrieve the set charge unit.
auto const & velocity_unit() const
Retrieve the set velocity unit.
std::vector< std::string > valid_fields() const
Build the list of valid output fields.
File(std::filesystem::path file_path, std::filesystem::path script_path, std::vector< std::string > const &output_fields, std::string mass_unit, std::string length_unit, std::string time_unit, std::string force_unit, std::string velocity_unit, std::string charge_unit, int chunk_size)
Constructor.
void flush()
Method to enforce flushing the buffer to disk.
cudaStream_t stream[1]
CUDA streams for parallel computing on CPU and GPU.
Communicator communicator
ParticleRange particles(std::span< Cell *const > cells)
boost::multi_array< int, 3 > MultiArray3i
Definition h5md_core.cpp:71
static void write_le_normal(LeesEdwardsBC const &lebc, HighFive::DataSet &dataset)
static std::vector< std::size_t > create_maxdims(hsize_t rank, hsize_t data_dim, hsize_t max_dim)
static auto fields_list_to_bitfield(std::vector< std::string > const &fields)
Definition h5md_core.cpp:95
static void write_box(BoxGeometry const &box_geo, HighFive::DataSet &dataset)
static void write_script(HighFive::File &h5md_file, std::filesystem::path const &script_path)
static std::unordered_map< std::string, H5MDOutputFields > const fields_map
Definition h5md_core.cpp:79
static void write_dataset(value_type const &data, HighFive::DataSet &dataset, extent_type const &offset, extent_type const &count)
static std::vector< hsize_t > create_chunk_dims(hsize_t rank, hsize_t data_dim, hsize_t size)
Utils::Vector< std::size_t, 2 > Vector2s
Definition h5md_core.cpp:76
static void extend_dataset(HighFive::DataSet &dataset, extent_type const &change_extent)
static void write_le_off(LeesEdwardsBC const &lebc, HighFive::DataSet &dataset)
static void backup_file(std::filesystem::path const &from, std::filesystem::path const &to)
static void write_le_dir(LeesEdwardsBC const &lebc, HighFive::DataSet &dataset)
static void write_attributes(HighFive::File &h5md_file)
Utils::Vector< std::size_t, 3 > Vector3s
Definition h5md_core.cpp:77
void write_td_particle_property(hsize_t prefix, hsize_t n_part_global, ParticleRange const &particles, HighFive::DataSet &dataset, Serializer serializer)
static std::vector< std::size_t > create_dims(hsize_t rank, hsize_t data_dim)
void serialize(Archive &ar, std::tuple< T... > &pack, unsigned int const)
Serialize std::tuple.
STL namespace.
Struct holding all information for one particle.
Definition Particle.hpp:435
auto const & mass() const
Definition Particle.hpp:492
auto const & q() const
Definition Particle.hpp:578
auto const & v() const
Definition Particle.hpp:473
auto const & type() const
Definition Particle.hpp:458
auto const & force() const
Definition Particle.hpp:475
auto const & id() const
Definition Particle.hpp:454
bool is_compliant(std::filesystem::path const &file) const