ESPResSo
Extensible Simulation Package for Research on Soft Matter Systems
Loading...
Searching...
No Matches
RegularDecomposition.cpp
Go to the documentation of this file.
1/*
2 * Copyright (C) 2010-2026 The ESPResSo project
3 * Copyright (C) 2002,2003,2004,2005,2006,2007,2008,2009,2010
4 * Max-Planck-Institute for Polymer Research, Theory Group
5 *
6 * This file is part of ESPResSo.
7 *
8 * ESPResSo is free software: you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation, either version 3 of the License, or
11 * (at your option) any later version.
12 *
13 * ESPResSo is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
17 *
18 * You should have received a copy of the GNU General Public License
19 * along with this program. If not, see <http://www.gnu.org/licenses/>.
20 */
21
23
24#include "cell_system/Cell.hpp"
25
26#include "communication.hpp"
28#include "errorhandling.hpp"
29#include "system/System.hpp"
30
31#include <utils/Vector.hpp>
32#include <utils/index.hpp>
35
36#include <boost/container/flat_set.hpp>
37#include <boost/mpi/collectives/all_reduce.hpp>
38#include <boost/mpi/communicator.hpp>
39#include <boost/mpi/request.hpp>
40#include <boost/range/algorithm/reverse.hpp>
41#include <boost/range/numeric.hpp>
42
#include <algorithm>
#include <array>
#include <cmath>
#include <cstddef>
#include <functional>
#include <initializer_list>
#include <iterator>
#include <optional>
#include <stdexcept>
#include <utility>
#include <vector>
52
53int RegularDecomposition::position_to_cell_index(
54 Utils::Vector3d const &pos) const {
56
57 for (auto i = 0u; i < 3u; i++) {
58 cpos[i] = static_cast<int>(std::floor(pos[i] * inv_cell_size[i])) + 1 -
59 cell_offset[i];
60
61 /* particles outside our box. Still take them if
62 nonperiodic boundary. We also accept the particle if we are at
63 the box boundary, and the particle is within the box. In this case
64 the particle belongs here and could otherwise potentially be dismissed
65 due to rounding errors. */
66 if (cpos[i] < 1) {
67 if ((!m_box.periodic(i) or (pos[i] >= m_box.length()[i])) and
68 m_local_box.boundary()[2u * i])
69 cpos[i] = 1;
70 else
71 return -1;
72 } else if (cpos[i] > cell_grid[i]) {
73 if ((!m_box.periodic(i) or (pos[i] < m_box.length()[i])) and
74 m_local_box.boundary()[2u * i + 1u])
75 cpos[i] = cell_grid[i];
76 else
77 return -1;
78 }
79 }
80
82}
83
84void RegularDecomposition::move_if_local(
86 std::vector<ParticleChange> &modified_cells) {
87 for (auto &part : src) {
88 auto target_cell = position_to_cell(part.pos());
89
90 if (target_cell) {
91 target_cell->particles().insert(std::move(part));
92 modified_cells.emplace_back(ModifiedList{target_cell->particles()});
93 } else {
94 rest.insert(std::move(part));
95 }
96 }
97
98 src.clear();
99}
100
101void RegularDecomposition::move_left_or_right(ParticleList &src,
104 int dir) const {
105 auto const is_open_boundary_left = m_local_box.boundary()[2 * dir] != 0;
106 auto const is_open_boundary_right = m_local_box.boundary()[2 * dir + 1] != 0;
109 auto const my_left = m_local_box.my_left()[dir];
110 auto const my_right = m_local_box.my_right()[dir];
111 for (auto it = src.begin(); it != src.end();) {
112 auto const pos = it->pos()[dir];
113 if (m_box.get_mi_coord(pos, my_right, dir) >= 0. and can_move_right) {
114 right.insert(std::move(*it));
115 it = src.erase(it);
116 } else if (m_box.get_mi_coord(pos, my_left, dir) < 0. and can_move_left) {
117 left.insert(std::move(*it));
118 it = src.erase(it);
119 } else {
120 ++it;
121 }
122 }
123}
124
125void RegularDecomposition::exchange_neighbors(
126 ParticleList &pl, std::vector<ParticleChange> &modified_cells) {
127 auto const node_neighbors = Utils::Mpi::cart_neighbors<3>(m_comm);
129
130 for (int dir = 0; dir < 3; dir++) {
131 /* Single node direction, no action needed. */
132 if (Utils::Mpi::cart_get<3>(m_comm).dims[dir] == 1) {
133 continue;
134 /* In this (common) case left and right neighbors are
135 the same, and we need only one communication */
136 }
137 if (Utils::Mpi::cart_get<3>(m_comm).dims[dir] == 2) {
138 move_left_or_right(pl, send_buf_l, send_buf_l, dir);
139
141 node_neighbors[2 * dir], 0, recv_buf_l);
142
143 send_buf_l.clear();
144 } else {
145 using boost::mpi::request;
147
148 move_left_or_right(pl, send_buf_l, send_buf_r, dir);
149
150 auto req_l = isendrecv(m_comm, node_neighbors[2 * dir], 0, send_buf_l,
151 node_neighbors[2 * dir], 0, recv_buf_l);
152 auto req_r = isendrecv(m_comm, node_neighbors[2 * dir + 1], 0, send_buf_r,
153 node_neighbors[2 * dir + 1], 0, recv_buf_r);
154
155 std::array<request, 4> reqs{{req_l[0], req_l[1], req_r[0], req_r[1]}};
156 boost::mpi::wait_all(reqs.begin(), reqs.end());
157
158 send_buf_l.clear();
159 send_buf_r.clear();
160 }
161
162 move_if_local(recv_buf_l, pl, modified_cells);
163 move_if_local(recv_buf_r, pl, modified_cells);
164 }
165}
166
167/**
168 * @brief Fold coordinates to box and reset the old position.
169 */
170static void fold_and_reset(Particle &p, BoxGeometry const &box_geo) {
171 box_geo.fold_position(p.pos(), p.image_box());
172
174}
175
177 std::vector<ParticleChange> &diff) {
179
180 for (auto &c : local_cells()) {
181 for (auto it = c->particles().begin(); it != c->particles().end();) {
183
185
186 /* Particle is in place */
187 if (target_cell == c) {
188 std::advance(it, 1);
189 continue;
190 }
191
192 auto p = std::move(*it);
193 it = c->particles().erase(it);
194 diff.emplace_back(ModifiedList{c->particles()});
195
196 /* Particle is not local */
197 if (target_cell == nullptr) {
198 diff.emplace_back(RemovedParticle{p.id()});
199 displaced_parts.insert(std::move(p));
200 }
201 /* Particle belongs on this node but is in the wrong cell. */
202 else if (target_cell != c) {
203 target_cell->particles().insert(std::move(p));
204 diff.emplace_back(ModifiedList{target_cell->particles()});
205 }
206 }
207 }
208
209 if (global) {
210 auto const grid = Utils::Mpi::cart_get<3>(m_comm).dims;
211 /* Worst case we need grid - 1 rounds per direction.
212 * This correctly implies that if there is only one node,
213 * no action should be taken. */
214 int rounds_left = grid[0] + grid[1] + grid[2] - 3;
215 for (; rounds_left > 0; rounds_left--) {
216 exchange_neighbors(displaced_parts, diff);
217
218 auto left_over = boost::mpi::all_reduce(m_comm, displaced_parts.size(),
219 std::plus<std::size_t>());
220
221 if (left_over == 0) {
222 break;
223 }
224 }
225 } else {
226 exchange_neighbors(displaced_parts, diff);
227 }
228
229 if (not displaced_parts.empty()) {
230 auto sort_cell = local_cells()[0];
231
232 for (auto &part : displaced_parts) {
233 runtimeErrorMsg() << "Particle " << part.id() << " moved more "
234 << "than one local box length in one timestep";
235 sort_cell->particles().insert(std::move(part));
236
237 diff.emplace_back(ModifiedList{sort_cell->particles()});
238 }
239 }
240}
241
242void RegularDecomposition::mark_cells() {
243 m_local_cells.clear();
244 m_ghost_cells.clear();
245
246 int cnt_c = 0;
247 for (int o = 0; o < ghost_cell_grid[2]; o++)
248 for (int n = 0; n < ghost_cell_grid[1]; n++)
249 for (int m = 0; m < ghost_cell_grid[0]; m++) {
250 if ((m > 0 && m < ghost_cell_grid[0] - 1 && n > 0 &&
251 n < ghost_cell_grid[1] - 1 && o > 0 && o < ghost_cell_grid[2] - 1))
252 m_local_cells.push_back(&cells.at(cnt_c++));
253 else
254 m_ghost_cells.push_back(&cells.at(cnt_c++));
255 }
256}
257
258void RegularDecomposition::fill_comm_cell_lists(ParticleList **part_lists,
259 Utils::Vector3i const &lc,
260 Utils::Vector3i const &hc) {
261 for (int o = lc[0]; o <= hc[0]; o++)
262 for (int n = lc[1]; n <= hc[1]; n++)
263 for (int m = lc[2]; m <= hc[2]; m++) {
264 auto const i = Utils::get_linear_index(o, n, m, ghost_cell_grid);
265
266 *part_lists++ = &(cells.at(i).particles());
267 }
268}
269
271 auto dir_max_range = [this](unsigned int i) {
272 return std::min(0.5 * m_box.length()[i], m_local_box.length()[i]);
273 };
274
275 return {dir_max_range(0u), dir_max_range(1u), dir_max_range(2u)};
276}
277
279int RegularDecomposition::calc_processor_min_num_cells() const {
280 /* the minimal number of cells can be lower if there are at least two nodes
281 serving a direction,
282 since this also ensures that the cell size is at most half the box
283 length. However, if there is only one processor for a direction, there
284 have to be at least two cells for this direction. */
285 return boost::accumulate(Utils::Mpi::cart_get<3>(m_comm).dims, 1,
286 [](int n_cells, int grid) {
287 return (grid == 1) ? 2 * n_cells : n_cells;
288 });
289}
290
291void RegularDecomposition::create_cell_grid(double range) {
292 auto const cart_info = Utils::Mpi::cart_get<3>(m_comm);
293
294 int n_local_cells;
295 auto cell_range = Utils::Vector3d::broadcast(range);
296 auto const min_num_cells = calc_processor_min_num_cells();
297
298 if (range <= 0.) {
299 /* this is the non-interacting case */
300 auto const cells_per_dir =
301 static_cast<int>(std::ceil(std::cbrt(min_num_cells)));
302
305 } else {
306 /* Calculate initial cell grid */
307 auto const &local_box_l = m_local_box.length();
308 auto const volume = Utils::product(local_box_l);
309 auto const scale = std::cbrt(RegularDecomposition::max_num_cells / volume);
310
311 for (auto i = 0u; i < 3u; i++) {
312 /* this is at least 1 */
313 cell_grid[i] = static_cast<int>(std::ceil(local_box_l[i] * scale));
314 cell_range[i] = local_box_l[i] / static_cast<double>(cell_grid[i]);
315
316 if (cell_range[i] < range) {
317 /* ok, too many cells for this direction, set to minimum */
318 cell_grid[i] = static_cast<int>(std::floor(local_box_l[i] / range));
319 if (cell_grid[i] < 1) {
321 << "interaction range " << range << " in direction " << i
322 << " is larger than the local box size " << local_box_l[i];
323 cell_grid[i] = 1;
324 }
325 cell_range[i] = local_box_l[i] / static_cast<double>(cell_grid[i]);
326 }
327 }
328
329 /* It may be necessary to asymmetrically assign the scaling to the
330 coordinates, which the above approach will not do.
331 For a symmetric box, it gives a symmetric result. Here we correct that.
332 */
333 for (;;) {
335
336 /* done */
337 if (n_local_cells <= RegularDecomposition::max_num_cells)
338 break;
339
340 /* find coordinate with the smallest cell range */
341 auto min_ind = 0u;
342 auto min_size = cell_range[0];
343
344 for (auto i = 1u; i < 3u; ++i) {
345 if (cell_grid[i] > 1 and cell_range[i] < min_size) {
346 min_ind = i;
347 min_size = cell_range[i];
348 }
349 }
350
353 }
354
355 /* sanity check */
357 runtimeErrorMsg() << "number of cells " << n_local_cells
358 << " is smaller than minimum " << min_num_cells
359 << ": either interaction range is too large for "
360 << "the current skin (range=" << range << ", "
361 << "half_local_box_l=[" << local_box_l / 2. << "]) "
362 << "or min_num_cells too large";
363 }
364 }
365
366 if (n_local_cells > RegularDecomposition::max_num_cells) {
367 runtimeErrorMsg() << "no suitable cell grid found";
368 }
369
370 auto const node_pos = cart_info.coords;
371
372 /* now set all dependent variables */
373 int new_cells = 1;
374 for (auto i = 0u; i < 3u; i++) {
375 ghost_cell_grid[i] = cell_grid[i] + 2;
377 cell_size[i] = m_local_box.length()[i] / static_cast<double>(cell_grid[i]);
378 inv_cell_size[i] = 1.0 / cell_size[i];
379 cell_offset[i] = node_pos[i] * cell_grid[i];
380 }
381
382 /* allocate cell array and cell pointer arrays */
383 cells.clear();
384 cells.resize(static_cast<unsigned int>(new_cells));
387}
388
389template <class K, class Comparator> auto make_flat_set(Comparator &&comp) {
390 return boost::container::flat_set<K, std::remove_reference_t<Comparator>>(
391 std::forward<Comparator>(comp));
392}
393
394void RegularDecomposition::init_cell_interactions() {
395
396 // Note: the global index for physical cells is 0-based.
397 // I.e., a global index of -1 refers to a ghost cell.
398 auto const halo = Utils::Vector3i{1, 1, 1}; // number of ghost layers
399 auto const cart_info = Utils::Mpi::cart_get<3>(m_comm);
400 // 3D index of the MPI rank in the Cartesian grid of MPI ranks
401 auto const &node_pos = cart_info.coords;
402 // size of the Cartesian grid of MPI ranks
403 auto const &node_grid = ::communicator.node_grid;
405 // MD cell index of lower halo layer on this MPI rank
406 auto const global_size = hadamard_product(node_grid, cell_grid);
407
408 // is a cell at the system boundary in the given coord
410 return (cell_idx[coord] == 0 or cell_idx[coord] == global_size[coord] - 1);
411 };
412
413 // For the fully connected feature (cells that don't share at least a corner)
414 // only apply if one cell is a ghost cell (i.e. connections across the
415 // periodic boundary.
417 Utils::Vector3i b) {
419 auto const [fc_normal, fc_dir] = *fully_connected_boundary();
420 auto const involves_ghost_cell =
422 b[fc_normal] == -1 or b[fc_normal] == global_size[fc_normal]);
424 // check if cells do not share at least a corner
425 return std::abs((a - b)[fc_dir]) > 1;
426 }
427 }
428 return false;
429 };
430
431 /* Translate a node local index (relative to the origin of the local grid)
432 * to a global index. */
433 auto global_index =
436 };
437
438 /* Linear index in the global cell grid. */
441
443 };
444
445 /* Translate a global index into a local one */
446 auto local_index =
449 };
450
451 // sanity checks
453 auto const [fc_normal, fc_dir] = *fully_connected_boundary();
454 if (fc_normal == fc_dir) {
455 throw std::domain_error("fully_connected_boundary normal and connection "
456 "coordinates need to differ.");
457 }
458 if (node_grid[fc_dir] != 1) {
459 throw std::runtime_error(
460 "The MPI nodegrid must be 1 in the fully connected direction.");
461 }
463 throw std::runtime_error(
464 "The fully connected boundary requires periodicity in the "
465 "boundary normal direction.");
466 }
467 }
468
469 /* We only consider local cells (e.g. not halo cells), which
470 * span the range [(1,1,1), cell_grid) in local coordinates. */
471 auto const start = global_index(Utils::Vector3i{1, 1, 1});
472 auto const end = start + cell_grid;
473
474 bool one_mpi_rank = m_comm.size() == 1;
475
476 /* loop all local cells */
477 for (int o = start[2]; o < end[2]; o++)
478 for (int n = start[1]; n < end[1]; n++)
479 for (int m = start[0]; m < end[0]; m++) {
480 /* next-nearest neighbors in every direction */
481 Utils::Vector3i lower_index = {m - 1, n - 1, o - 1};
482 Utils::Vector3i upper_index = {m + 1, n + 1, o + 1};
483
484 /* In the fully connected case, we consider all cells
485 * in the direction as neighbors, not only the nearest ones.
486 // */
489
490 // Fully connected is only needed at the box surface
491 if (at_boundary(fc_boundary, {m, n, o})) {
494 }
495 }
496
497 /* In non-periodic directions, the halo needs not
498 * be considered. */
499 for (auto i = 0u; i < 3u; i++) {
500 if (not m_box.periodic(i)) {
501 lower_index[i] = std::max(0, lower_index[i]);
502 upper_index[i] = std::min(global_size[i] - 1, upper_index[i]);
503 }
504 }
505
506 /* Unique set of neighbors, cells are compared by their linear
507 * index in the global cell grid. */
508 auto neighbors = make_flat_set<Utils::Vector3i>(
509 [&](Utils::Vector3i const &a, Utils::Vector3i const &b) {
511 });
512
513 /* Collect neighbors */
514 for (int p = lower_index[2]; p <= upper_index[2]; p++)
515 for (int q = lower_index[1]; q <= upper_index[1]; q++)
516 for (int r = lower_index[0]; r <= upper_index[0]; r++) {
518 // Avoid fully connecting the boundary layer and the
519 // next INNER layer
520 if (fcb_is_inner_connection({m, n, o}, {r, q, p}))
521 continue;
522 }
523 neighbors.insert(Utils::Vector3i{r, q, p});
524 }
525
526 /* Red-black partition by global index. */
527 auto const ind1 = folded_linear_index({m, n, o});
528
529 std::vector<Cell *> red_neighbors;
530 std::vector<Cell *> black_neighbors;
531
532 /* If we are running on a single MPI rank, it is not necessary to use
533 * ghost cells. Instead of adding a ghost cell as neighbor,
534 * we directly connect to the corresponding
535 * physical cell across the periodic boundary */
536 for (auto &neighbor : neighbors) {
537 if (one_mpi_rank) {
538 for (auto coord : {0u, 1u, 2u}) {
539 if (neighbor[coord] == -1) {
541 } else if (neighbor[coord] == cell_grid[coord]) {
543 }
544 }
545 }
546 auto const ind2 = folded_linear_index(neighbor);
547 /* Exclude cell itself */
548 if (ind1 == ind2)
549 continue;
550
551 auto cell = &cells.at(
553
554 // Divide red and black neighbors
555 if (ind2 > ind1) {
556 red_neighbors.push_back(cell);
557 } else {
558 black_neighbors.push_back(cell);
559 }
560 }
561
562 // Assign neighbors to the cell
565 }
566}
567
568namespace {
569/** Revert the order of a communicator: After calling this the
570 * communicator is working in reverted order with exchanged
571 * communication types GHOST_SEND <-> GHOST_RECV.
572 */
574 /* revert order */
575 boost::reverse(comm.communications);
576
577 /* exchange SEND/RECV */
578 for (auto &c : comm.communications) {
579 if (c.type == GHOST_SEND)
580 c.type = GHOST_RECV;
581 else if (c.type == GHOST_RECV)
582 c.type = GHOST_SEND;
583 else if (c.type == GHOST_LOCL) {
584 boost::reverse(c.part_lists);
585 }
586 }
587}
588
589/** Of every two communication rounds, set the first receivers to prefetch and
590 * poststore
591 */
593 for (auto it = comm.communications.begin(); it != comm.communications.end();
594 it += 2) {
595 auto next = std::next(it);
596 if (it->type == GHOST_RECV && next->type == GHOST_SEND) {
599 }
600 }
601}
602
603} // namespace
604
605GhostCommunicator RegularDecomposition::prepare_comm() {
606 // When running on a single MPI rank, the ghost cells are not needed,
607 // as the corresponding physical cell is available on the same MPI rank.
608 // The cell neighborships are set up such that cells across the periodic
609 // boundaries are connected as neighbors directly.
610 if (m_comm.size() == 1)
611 return {};
612
613 int dir, lr, i, cnt, n_comm_cells[3];
614 Utils::Vector3i lc{}, hc{}, done{};
615
616 auto const comm_info = Utils::Mpi::cart_get<3>(m_comm);
617 auto const node_neighbors = Utils::Mpi::cart_neighbors<3>(m_comm);
618
619 /* calculate number of communications */
620 std::size_t num = 0;
621 for (dir = 0; dir < 3; dir++) {
622 for (lr = 0; lr < 2; lr++) {
623 /* No communication for border of non periodic direction */
624 if (comm_info.dims[dir] == 1)
625 num++;
626 else
627 num += 2;
628 }
629 }
630
631 /* prepare communicator */
633
634 /* number of cells to communicate in a direction */
635 n_comm_cells[0] = cell_grid[1] * cell_grid[2];
638
639 cnt = 0;
640 /* direction loop: x, y, z */
641 for (dir = 0; dir < 3; dir++) {
642 lc[(dir + 1) % 3] = 1 - done[(dir + 1) % 3];
643 lc[(dir + 2) % 3] = 1 - done[(dir + 2) % 3];
644 hc[(dir + 1) % 3] = cell_grid[(dir + 1) % 3] + done[(dir + 1) % 3];
645 hc[(dir + 2) % 3] = cell_grid[(dir + 2) % 3] + done[(dir + 2) % 3];
646 /* lr loop: left right */
647 /* here we could in principle build in a one sided ghost
648 communication, simply by taking the lr loop only over one
649 value */
650 for (lr = 0; lr < 2; lr++) {
651 if (comm_info.dims[dir] == 1) {
652 /* just copy cells on a single node */
653 ghost_comm.communications[cnt].type = GHOST_LOCL;
654 ghost_comm.communications[cnt].node = m_comm.rank();
655
656 /* Buffer has to contain Send and Recv cells -> factor 2 */
657 ghost_comm.communications[cnt].part_lists.resize(2 * n_comm_cells[dir]);
658
659 /* fill send ghost_comm cells */
660 lc[dir] = hc[dir] = 1 + lr * (cell_grid[dir] - 1);
661
662 fill_comm_cell_lists(ghost_comm.communications[cnt].part_lists.data(),
663 lc, hc);
664
665 /* fill recv ghost_comm cells */
666 lc[dir] = hc[dir] = 0 + (1 - lr) * (cell_grid[dir] + 1);
667
668 /* place receive cells after send cells */
669 fill_comm_cell_lists(
670 &ghost_comm.communications[cnt].part_lists[n_comm_cells[dir]], lc,
671 hc);
672
673 cnt++;
674 } else {
675 /* i: send/recv loop */
676 for (i = 0; i < 2; i++) {
677 if ((comm_info.coords[dir] + i) % 2 == 0) {
678 ghost_comm.communications[cnt].type = GHOST_SEND;
679 ghost_comm.communications[cnt].node = node_neighbors[2 * dir + lr];
680 ghost_comm.communications[cnt].part_lists.resize(n_comm_cells[dir]);
681 /* prepare folding of ghost positions */
682
683 lc[dir] = hc[dir] = 1 + lr * (cell_grid[dir] - 1);
684
685 fill_comm_cell_lists(
686 ghost_comm.communications[cnt].part_lists.data(), lc, hc);
687 cnt++;
688 }
689 if ((comm_info.coords[dir] + (1 - i)) % 2 == 0) {
690 ghost_comm.communications[cnt].type = GHOST_RECV;
691 ghost_comm.communications[cnt].node =
692 node_neighbors[2 * dir + (1 - lr)];
693 ghost_comm.communications[cnt].part_lists.resize(n_comm_cells[dir]);
694
695 lc[dir] = hc[dir] = (1 - lr) * (cell_grid[dir] + 1);
696
697 fill_comm_cell_lists(
698 ghost_comm.communications[cnt].part_lists.data(), lc, hc);
699 cnt++;
700 }
701 }
702 }
703 done[dir] = 1;
704 }
705 }
706
707 return ghost_comm;
708}
709
711 boost::mpi::communicator comm, double range, BoxGeometry const &box_geo,
712 LocalBox const &local_geo,
713 std::optional<std::pair<int, int>> fully_connected)
714 : m_comm(std::move(comm)), m_box(box_geo), m_local_box(local_geo),
715 m_fully_connected_boundary(std::move(fully_connected)) {
716
717 /* set up new regular decomposition cell structure */
718 create_cell_grid(range);
719
720 /* setup cell neighbors */
721 init_cell_interactions();
722
723 /* mark local and ghost cells */
724 mark_cells();
725
726 /* create communicators */
727 m_exchange_ghosts_comm = prepare_comm();
728 m_collect_ghost_force_comm = prepare_comm();
729
730 /* collect forces has to be done in reverted order! */
731 revert_comm_order(m_collect_ghost_force_comm);
732
733 assign_prefetches(m_exchange_ghosts_comm);
734 assign_prefetches(m_collect_ghost_force_comm);
735}
static int coord(std::string const &s)
auto make_flat_set(Comparator &&comp)
static void fold_and_reset(Particle &p, BoxGeometry const &box_geo)
Fold coordinates to box and reset the old position.
Vector implementation and trait types for boost qvm interoperability.
T get_mi_coord(T a, T b, unsigned coord) const
Get the minimum-image distance between two coordinates.
Utils::Vector3d const & length() const
Box length.
constexpr bool periodic(unsigned coord) const
Check periodicity in direction.
void fold_position(Utils::Vector3d &pos, Utils::Vector3i &image_box) const
Fold coordinates to primary simulation box in-place.
auto const & my_right() const
Right (top, back) corner of this nodes local box.
Definition LocalBox.hpp:47
auto const & boundary() const
Boundary information for the local box.
Definition LocalBox.hpp:59
auto const & my_left() const
Left (bottom, front) corner of this nodes local box.
Definition LocalBox.hpp:45
auto const & length() const
Dimensions of the box a single node is responsible for.
Definition LocalBox.hpp:49
T & insert(T const &v)
Insert an element into the container.
Definition Bag.hpp:147
DEVICE_QUALIFIER constexpr size_type size() const noexcept
Definition Array.hpp:166
static DEVICE_QUALIFIER constexpr Vector< T, N > broadcast(typename Base::value_type const &value) noexcept
Create a vector that has all entries set to the same value.
Definition Vector.hpp:131
cudaStream_t stream[1]
CUDA streams for parallel computing on CPU and GPU.
Communicator communicator
This file contains the errorhandling code for severe errors, like a broken bond or illegal parameter ...
#define runtimeErrorMsg()
#define GHOST_RECV
recv from a single node
Definition ghosts.hpp:108
#define GHOST_PSTSTORE
additional flag for poststoring
Definition ghosts.hpp:121
#define GHOST_LOCL
transfer data from cell to cell on this node
Definition ghosts.hpp:114
#define GHOST_SEND
send to a single node
Definition ghosts.hpp:106
#define GHOST_PREFETCH
additional flag for prefetching
Definition ghosts.hpp:119
std::array< mpi::request, 2 > isendrecv(mpi::communicator const &comm, int dest, int stag, const T &sval, int src, int rtag, T &rval)
Definition sendrecv.hpp:73
mpi::status sendrecv(mpi::communicator const &comm, int dest, int stag, const T &sval, int src, int rtag, T &rval)
Definition sendrecv.hpp:66
T product(Vector< T, N > const &v)
Definition Vector.hpp:372
int get_linear_index(int a, int b, int c, const Vector3i &adim)
Definition index.hpp:35
auto hadamard_product(Vector< T, N > const &a, Vector< U, N > const &b)
Definition Vector.hpp:377
void assign_prefetches(GhostCommunicator &comm)
Of every two communication rounds, set the first receivers to prefetch and poststore.
void revert_comm_order(GhostCommunicator &comm)
Revert the order of a communicator: After calling this the communicator is working in reverted order ...
STL namespace.
Utils::Vector3i node_grid
Properties for a ghost communication.
Definition ghosts.hpp:159
std::vector< GhostCommunication > communications
List of ghost communications.
Definition ghosts.hpp:168
Struct holding all information for one particle.
Definition Particle.hpp:435
auto & pos_at_last_verlet_update()
Definition Particle.hpp:482
auto const & image_box() const
Definition Particle.hpp:484
auto const & pos() const
Definition Particle.hpp:471
GhostCommunicator m_exchange_ghosts_comm
Utils::Vector3i ghost_cell_grid
linked cell grid with ghost frame.
Utils::Vector3d max_cutoff() const override
Utils::Vector3d inv_cell_size
inverse cell_size.
std::vector< Cell * > m_ghost_cells
BoxGeometry const & m_box
Cell * particle_to_cell(Particle const &p) override
void resort(bool global, std::vector< ParticleChange > &diff) override
std::vector< Cell > cells
RegularDecomposition(boost::mpi::communicator comm, double range, BoxGeometry const &box_geo, LocalBox const &local_geo, std::optional< std::pair< int, int > > fully_connected)
Utils::Vector3d cell_size
Cell size.
std::span< Cell *const > local_cells() const override
std::vector< Cell * > m_local_cells
Utils::Vector3d max_range() const override
GhostCommunicator m_collect_ghost_force_comm
Utils::Vector3i cell_grid
Grid dimensions per node.
Utils::Vector3i cell_offset
Offset in global grid.
boost::mpi::communicator m_comm