ESPResSo
Extensible Simulation Package for Research on Soft Matter Systems
communication.cpp
/*
 * Copyright (C) 2010-2022 The ESPResSo project
 * Copyright (C) 2002,2003,2004,2005,2006,2007,2008,2009,2010
 *   Max-Planck-Institute for Polymer Research, Theory Group
 *
 * This file is part of ESPResSo.
 *
 * ESPResSo is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * ESPResSo is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <config/config.hpp>

#include "communication.hpp"

#include "cuda/init.hpp"
#include "errorhandling.hpp"
#include "fft/init.hpp"

#ifdef ESPRESSO_WALBERLA
#include <walberla_bridge/walberla_init.hpp>
#endif

#ifdef ESPRESSO_SHARED_MEMORY_PARALLELISM
#include <Cabana_Core.hpp>
#include <Kokkos_Core.hpp>
#include <omp.h>
#endif

#include <utils/Vector.hpp>
#include <utils/mpi/cart_comm.hpp>

#include <boost/mpi.hpp>
#include <boost/mpi/communicator.hpp>
#include <boost/mpi/environment.hpp>

#include <mpi.h>
#if defined(OPEN_MPI)
#include <mpi-ext.h>
#endif

#include <cassert>
#include <cstdlib>
#include <cstring>
#include <memory>
#include <optional>
#include <string>
#include <tuple>
#include <utility>

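// RAII guard: constructing a KokkosHandle initializes the Kokkos runtime and
// destroying it finalizes the runtime, so its lifetime brackets all Kokkos use.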
#ifdef ESPRESSO_SHARED_MEMORY_PARALLELISM
struct KokkosHandle {
  KokkosHandle() { Kokkos::initialize(); }
  ~KokkosHandle() { Kokkos::finalize(); }
};
#endif

boost::mpi::communicator comm_cart;
Communicator communicator{};
std::unique_ptr<CommunicationEnvironment> communication_environment{};
#ifdef ESPRESSO_SHARED_MEMORY_PARALLELISM
std::shared_ptr<KokkosHandle> kokkos_handle{};
#endif
int this_node = -1;

/** Fetch an environment variable, or std::nullopt if it is not set. */
[[maybe_unused]] static auto get_env_variable(char const *const name) {
  char const *const value = std::getenv(name);
  std::optional<std::string> result{std::nullopt};
  if (value) {
    result = std::string(value);
  }
  return result;
}

/** Create a boost::mpi::environment without command-line arguments. */
static auto make_default_mpi_env() {
  int argc = 0;
  char **argv = nullptr;
  return std::make_shared<boost::mpi::environment>(argc, argv);
}

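// CommunicationEnvironment owns the global communication state: it seeds the
// Cartesian communicator, the MpiCallbacks instance, error handling, and the
// optional subsystem runtimes.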
CommunicationEnvironment::CommunicationEnvironment()
    : CommunicationEnvironment(make_default_mpi_env()) {}

CommunicationEnvironment::CommunicationEnvironment(
    std::shared_ptr<boost::mpi::environment> mpi_env)
    : m_mpi_env{std::move(mpi_env)} {
#ifdef ESPRESSO_SHARED_MEMORY_PARALLELISM
  // Default to a single OpenMP thread unless the user set OMP_NUM_THREADS.
  auto const num_threads_env = get_env_variable("OMP_NUM_THREADS");
  if (not num_threads_env or num_threads_env->empty()) {
    omp_set_num_threads(1);
  }
#endif

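  // Detect GPU-aware MPI: Open MPI is probed via its MPIX extensions,
  // MPICH and Cray MPICH via their environment variables.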
  m_is_mpi_gpu_aware = false;

#if defined(OPEN_MPI)
#if defined(OMPI_HAVE_MPI_EXT_ROCM) && OMPI_HAVE_MPI_EXT_ROCM
  m_is_mpi_gpu_aware |= static_cast<bool>(MPIX_Query_rocm_support());
#endif
#if defined(OMPI_HAVE_MPI_EXT_CUDA) && OMPI_HAVE_MPI_EXT_CUDA
  m_is_mpi_gpu_aware |= static_cast<bool>(MPIX_Query_cuda_support());
#endif
#endif // defined(OPEN_MPI)

#if defined(MPICH)
  auto const mpich_gpu_env = get_env_variable("MPIR_CVAR_ENABLE_GPU");
  m_is_mpi_gpu_aware |= (mpich_gpu_env and *mpich_gpu_env == "1");
#endif // defined(MPICH)

#if defined(_CRAYC) or defined(__cray__)
  auto const cray_mpich_gpu_env = get_env_variable("MPICH_GPU_SUPPORT_ENABLED");
  m_is_mpi_gpu_aware |= (cray_mpich_gpu_env and *cray_mpich_gpu_env == "1");
#endif // defined(_CRAYC) or defined(__cray__)

  communicator.full_initialization();

  m_callbacks =
      std::make_shared<Communication::MpiCallbacks>(comm_cart, m_mpi_env);

  ErrorHandling::init_error_handling(comm_cart);

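  // Bring up optional subsystems now that MPI and the callback system exist.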
#ifdef ESPRESSO_WALBERLA
  walberla::mpi_init();
#endif

#ifdef ESPRESSO_CUDA
  cuda_on_program_start();
#endif

#ifdef ESPRESSO_FFTW
  fft_on_program_start();
#endif

#ifdef ESPRESSO_SHARED_MEMORY_PARALLELISM
  kokkos_handle = std::make_shared<KokkosHandle>();
#endif
}

CommunicationEnvironment::~CommunicationEnvironment() {
#ifdef ESPRESSO_SHARED_MEMORY_PARALLELISM
  Kokkos::fence();
  kokkos_handle.reset();
#endif

#ifdef ESPRESSO_WALBERLA
  walberla::mpi_deinit();
#endif

  ErrorHandling::deinit_error_handling();
  m_callbacks.reset();
}

Communicator::Communicator()
    : comm{::comm_cart}, node_grid{}, this_node{::this_node}, size{-1} {}

void Communicator::init_comm_cart() {
  auto constexpr reorder = false;
  comm = Utils::Mpi::cart_create(comm, node_grid, reorder);
  this_node = comm.rank();
  // check topology validity
  std::ignore = Utils::Mpi::cart_neighbors<3>(comm);
}

void Communicator::full_initialization() {
  assert(this_node == -1);
  assert(size == -1);
  MPI_Comm_size(MPI_COMM_WORLD, &size);
  node_grid = Utils::Mpi::dims_create<3>(size);
  init_comm_cart();
}
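// Example: on 8 MPI ranks, Utils::Mpi::dims_create<3>(8) typically yields a
// 2x2x2 node_grid, i.e. the domain is split into octants, one per rank.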

void Communicator::set_node_grid(Utils::Vector3i const &value) {
  node_grid = value;
  init_comm_cart();
}

Utils::Vector3i Communicator::calc_node_index() const {
  return Utils::Mpi::cart_coords<3>(comm, this_node);
}

void mpi_loop() {
  // Worker ranks block here processing requests from the head node.
  if (this_node != 0)
    Communication::mpiCallbacks().loop();
}
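A minimal usage sketch for context, not part of this file: a hypothetical driver that brings the environment up and down. The main() shown here is an assumption for illustration; only CommunicationEnvironment, communication_environment, and mpi_loop() are taken from the listing above, assuming communication.hpp declares them.

#include "communication.hpp"

#include <boost/mpi/environment.hpp>

#include <memory>
#include <utility>

int main(int argc, char **argv) {
  // Hand the real command line to Boost.MPI (calls MPI_Init internally).
  auto mpi_env = std::make_shared<boost::mpi::environment>(argc, argv);
  // The constructor wires up comm_cart, MpiCallbacks, error handling, and the
  // optional waLBerla/CUDA/FFTW/Kokkos subsystems.
  ::communication_environment =
      std::make_unique<CommunicationEnvironment>(std::move(mpi_env));
  mpi_loop(); // worker ranks block here serving callbacks; rank 0 returns
  // ... rank 0 drives the simulation through the callback machinery ...
  ::communication_environment.reset(); // releases subsystems, then callbacks
  return 0;
}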