26#include "communication.hpp"
29#include "system/System.hpp"
36#include <boost/container/flat_set.hpp>
37#include <boost/mpi/collectives/all_reduce.hpp>
38#include <boost/mpi/communicator.hpp>
39#include <boost/mpi/request.hpp>
40#include <boost/range/algorithm/reverse.hpp>
41#include <boost/range/numeric.hpp>
48#include <initializer_list>
53int RegularDecomposition::position_to_cell_index(
57 for (
auto i = 0
u; i < 3u; i++) {
84void RegularDecomposition::move_if_local(
87 for (
auto &part :
src) {
94 rest.
insert(std::move(part));
111 for (
auto it =
src.begin();
it !=
src.end();) {
112 auto const pos =
it->pos()[dir];
117 left.insert(std::move(*
it));
125void RegularDecomposition::exchange_neighbors(
130 for (
int dir = 0; dir < 3; dir++) {
132 if (Utils::Mpi::cart_get<3>(
m_comm).dims[dir] == 1) {
137 if (Utils::Mpi::cart_get<3>(
m_comm).dims[dir] == 2) {
145 using boost::mpi::request;
156 boost::mpi::wait_all(
reqs.begin(),
reqs.end());
177 std::vector<ParticleChange> &
diff) {
181 for (
auto it = c->particles().begin();
it != c->particles().end();) {
192 auto p = std::move(*
it);
193 it = c->particles().erase(
it);
210 auto const grid = Utils::Mpi::cart_get<3>(
m_comm).dims;
219 std::plus<std::size_t>());
234 <<
"than one local box length in one timestep";
235 sort_cell->particles().insert(std::move(part));
242void RegularDecomposition::mark_cells() {
258void RegularDecomposition::fill_comm_cell_lists(
ParticleList **part_lists,
261 for (
int o =
lc[0];
o <=
hc[0];
o++)
262 for (
int n =
lc[1]; n <=
hc[1]; n++)
263 for (
int m =
lc[2]; m <=
hc[2]; m++) {
266 *part_lists++ = &(
cells.at(i).particles());
279int RegularDecomposition::calc_processor_min_num_cells()
const {
285 return boost::accumulate(Utils::Mpi::cart_get<3>(
m_comm).dims, 1,
291void RegularDecomposition::create_cell_grid(
double range) {
309 auto const scale = std::cbrt(RegularDecomposition::max_num_cells / volume);
311 for (
auto i = 0
u; i < 3u; i++) {
316 if (cell_range[i] <
range) {
321 <<
"interaction range " <<
range <<
" in direction " << i
322 <<
" is larger than the local box size " <<
local_box_l[i];
344 for (
auto i = 1u; i < 3u; ++i) {
359 <<
": either interaction range is too large for "
360 <<
"the current skin (range=" <<
range <<
", "
361 <<
"half_local_box_l=[" <<
local_box_l / 2. <<
"]) "
362 <<
"or min_num_cells too large";
374 for (
auto i = 0
u; i < 3u; i++) {
390 return boost::container::flat_set<K, std::remove_reference_t<Comparator>>(
391 std::forward<Comparator>(
comp));
394void RegularDecomposition::init_cell_interactions() {
425 return std::abs((a - b)[
fc_dir]) > 1;
455 throw std::domain_error(
"fully_connected_boundary normal and connection "
456 "coordinates need to differ.");
458 if (node_grid[
fc_dir] != 1) {
459 throw std::runtime_error(
460 "The MPI nodegrid must be 1 in the fully connected direction.");
463 throw std::runtime_error(
464 "The fully connected boundary requires periodicity in the "
465 "boundary normal direction.");
477 for (
int o = start[2];
o < end[2];
o++)
478 for (
int n = start[1]; n < end[1]; n++)
479 for (
int m = start[0]; m < end[0]; m++) {
499 for (
auto i = 0
u; i < 3u; i++) {
538 for (
auto coord : {0
u, 1u, 2u}) {
551 auto cell = &
cells.at(
584 boost::reverse(c.part_lists);
595 auto next = std::next(
it);
621 for (dir = 0; dir < 3; dir++) {
622 for (
lr = 0;
lr < 2;
lr++) {
641 for (dir = 0; dir < 3; dir++) {
642 lc[(dir + 1) % 3] = 1 -
done[(dir + 1) % 3];
643 lc[(dir + 2) % 3] = 1 -
done[(dir + 2) % 3];
650 for (
lr = 0;
lr < 2;
lr++) {
662 fill_comm_cell_lists(
ghost_comm.communications[
cnt].part_lists.data(),
669 fill_comm_cell_lists(
676 for (i = 0; i < 2; i++) {
677 if ((
comm_info.coords[dir] + i) % 2 == 0) {
685 fill_comm_cell_lists(
689 if ((
comm_info.coords[dir] + (1 - i)) % 2 == 0) {
697 fill_comm_cell_lists(
714 : m_comm(
std::
move(comm)), m_box(box_geo), m_local_box(local_geo),
718 create_cell_grid(
range);
721 init_cell_interactions();
static int coord(std::string const &s)
auto make_flat_set(Comparator &&comp)
static void fold_and_reset(Particle &p, BoxGeometry const &box_geo)
Fold coordinates to box and reset the old position.
Vector implementation and trait types for boost qvm interoperability.
T get_mi_coord(T a, T b, unsigned coord) const
Get the minimum-image distance between two coordinates.
Utils::Vector3d const & length() const
Box length.
constexpr bool periodic(unsigned coord) const
Check periodicity in direction.
void fold_position(Utils::Vector3d &pos, Utils::Vector3i &image_box) const
Fold coordinates to primary simulation box in-place.
auto const & my_right() const
Right (top, back) corner of this node's local box.
auto const & boundary() const
Boundary information for the local box.
auto const & my_left() const
Left (bottom, front) corner of this node's local box.
auto const & length() const
Dimensions of the box a single node is responsible for.
T & insert(T const &v)
Insert an element into the container.
DEVICE_QUALIFIER constexpr size_type size() const noexcept
static DEVICE_QUALIFIER constexpr Vector< T, N > broadcast(typename Base::value_type const &value) noexcept
Create a vector that has all entries set to the same value.
cudaStream_t stream[1]
CUDA streams for parallel computing on CPU and GPU.
Communicator communicator
This file contains the errorhandling code for severe errors, like a broken bond or illegal parameter ...
#define runtimeErrorMsg()
#define GHOST_RECV
recv from a single node
#define GHOST_PSTSTORE
additional flag for poststoring
#define GHOST_LOCL
transfer data from cell to cell on this node
#define GHOST_SEND
send to a single node
#define GHOST_PREFETCH
additional flag for prefetching
std::array< mpi::request, 2 > isendrecv(mpi::communicator const &comm, int dest, int stag, const T &sval, int src, int rtag, T &rval)
mpi::status sendrecv(mpi::communicator const &comm, int dest, int stag, const T &sval, int src, int rtag, T &rval)
T product(Vector< T, N > const &v)
int get_linear_index(int a, int b, int c, const Vector3i &adim)
auto hadamard_product(Vector< T, N > const &a, Vector< U, N > const &b)
void assign_prefetches(GhostCommunicator &comm)
Of every two communication rounds, set the first receivers to prefetch and poststore.
void revert_comm_order(GhostCommunicator &comm)
Revert the order of a communicator: After calling this the communicator is working in reverted order ...
Utils::Vector3i node_grid
Properties for a ghost communication.
std::vector< GhostCommunication > communications
List of ghost communications.
Struct holding all information for one particle.
auto & pos_at_last_verlet_update()
auto const & image_box() const
GhostCommunicator m_exchange_ghosts_comm
Utils::Vector3i ghost_cell_grid
linked cell grid with ghost frame.
Utils::Vector3d max_cutoff() const override
Utils::Vector3d inv_cell_size
inverse cell_size.
std::vector< Cell * > m_ghost_cells
BoxGeometry const & m_box
Cell * particle_to_cell(Particle const &p) override
auto fully_connected_boundary() const
void resort(bool global, std::vector< ParticleChange > &diff) override
std::vector< Cell > cells
RegularDecomposition(boost::mpi::communicator comm, double range, BoxGeometry const &box_geo, LocalBox const &local_geo, std::optional< std::pair< int, int > > fully_connected)
Utils::Vector3d cell_size
Cell size.
std::span< Cell *const > local_cells() const override
std::vector< Cell * > m_local_cells
Utils::Vector3d max_range() const override
GhostCommunicator m_collect_ghost_force_comm
Utils::Vector3i cell_grid
Grid dimensions per node.
Utils::Vector3i cell_offset
Offset in global grid.
boost::mpi::communicator m_comm