/* +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
   Copyright (c) 2011-2023 The plumed team
   (see the PEOPLE file at the root of the distribution for a list of names)

   See http://www.plumed.org for more information.

   This file is part of plumed, version 2.

   plumed is free software: you can redistribute it and/or modify
   it under the terms of the GNU Lesser General Public License as published by
   the Free Software Foundation, either version 3 of the License, or
   (at your option) any later version.

   plumed is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public License
   along with plumed. If not, see <http://www.gnu.org/licenses/>.
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ */
#ifndef __PLUMED_tools_Communicator_h
#define __PLUMED_tools_Communicator_h
#ifdef __PLUMED_HAS_MPI
#include <mpi.h>
#endif
#include <cstdlib>
#include "Exception.h"
#include "TypesafePtr.h"
#include <vector>
#include <string>
#include "Vector.h"
#include "Tensor.h"
#include "Matrix.h"

namespace PLMD {

#ifndef __PLUMED_HAS_MPI
/// Surrogate of MPI_Comm when the MPI library is not available
class MPI_Comm {};
/// Surrogate of MPI_Datatype when the MPI library is not available
class MPI_Datatype {};
/// Surrogate of MPI_Status when the MPI library is not available
class MPI_Status {};
/// Surrogate of MPI_Request when the MPI library is not available
class MPI_Request {};
#endif

/// \ingroup TOOLBOX
/// Class containing wrappers to MPI.
/// All the MPI-related functionality is relegated here.
class Communicator {
/// Communicator
  MPI_Comm communicator;
/// Function returning the MPI type.
/// You can use it to obtain the MPI type corresponding to a C++ type, e.g.
/// `MPI_Datatype type=getMPIType<double>();`
  template <class T>
  static MPI_Datatype getMPIType();
/// Structure defining a buffer for MPI.
/// It contains information about the pointed-to data, together with its type
/// and size. It is useful for writing wrappers of MPI functions in which the
/// triplet (buffer, type, size) is grouped into a single object. It can be
/// built starting from different kinds of data. To make the MPI wrappers
/// compatible with e.g. vectors, add constructors here.
  struct Data {
    void*pointer;
    int size;
    int nbytes=0;
    MPI_Datatype type;
/// Init from pointer and size
    template <typename T> Data(T*p,int s): pointer(p), size(s), nbytes(sizeof(T)), type(getMPIType<T>()) {}
/// Init from reference
    template <typename T> explicit Data(T&p): pointer(&p), size(1), nbytes(sizeof(T)), type(getMPIType<T>()) {}
/// Init from pointer to VectorGeneric
    template <unsigned n> explicit Data(VectorGeneric<n> *p,int s): pointer(p), size(n*s), nbytes(sizeof(double)), type(getMPIType<double>()) {}
/// Init from reference to VectorGeneric
    template <unsigned n> explicit Data(VectorGeneric<n> &p): pointer(&p), size(n), nbytes(sizeof(double)), type(getMPIType<double>()) {}
/// Init from pointer to TensorGeneric
    template <unsigned n,unsigned m> explicit Data(TensorGeneric<n,m> *p,int s): pointer(p), size(n*m*s), nbytes(sizeof(double)), type(getMPIType<double>()) {}
/// Init from reference to TensorGeneric
    template <unsigned n,unsigned m> explicit Data(TensorGeneric<n,m> &p): pointer(&p), size(n*m), nbytes(sizeof(double)), type(getMPIType<double>()) {}
/// Init from reference to std::vector
    template <typename T> explicit Data(std::vector<T>&v) {
      Data d(v.data(),v.size());
      pointer=d.pointer;
      size=d.size;
      type=d.type;
    }
/// Init from reference to PLMD::Matrix
    template <typename T> explicit Data(Matrix<T>&m ) {
      if(m.nrows()*m.ncols()>0) {
        Data d(&m(0,0),m.nrows()*m.ncols());
        pointer=d.pointer;
        size=d.size;
        type=d.type;
      } else {
        pointer=NULL;
        size=0;
      }
    }
/// Init from reference to std::string
    explicit Data(std::string&s) {
      if(s.size()>0) {
        Data d(&s[0],s.size());
        pointer=d.pointer;
        size=d.size;
        type=d.type;
      } else {
        pointer=NULL;
        size=0;
      }
    }
  };
/// Const version of Communicator::Data
/// See Communicator::Data documentation
  struct ConstData {
    const void*pointer;
    int size;
    int nbytes=0;
    MPI_Datatype type;
    template <typename T> explicit ConstData(const T*p,int s): pointer(p), size(s), nbytes(sizeof(T)), type(getMPIType<T>()) {}
    template <typename T> explicit ConstData(const T&p): pointer(&p), size(1), nbytes(sizeof(T)), type(getMPIType<T>()) {}
    template <unsigned n> explicit ConstData(const VectorGeneric<n> *p,int s): pointer(p), size(n*s), nbytes(sizeof(double)), type(getMPIType<double>()) {}
    template <unsigned n> explicit ConstData(const VectorGeneric<n> &p): pointer(&p), size(n), nbytes(sizeof(double)), type(getMPIType<double>()) {}
    template <unsigned n,unsigned m> explicit ConstData(const TensorGeneric<n,m> *p,int s): pointer(p), size(n*m*s), nbytes(sizeof(double)), type(getMPIType<double>()) {}
    template <unsigned n,unsigned m> explicit ConstData(const TensorGeneric<n,m> &p): pointer(&p), size(n*m), nbytes(sizeof(double)), type(getMPIType<double>()) {}
    template <typename T> explicit ConstData(const std::vector<T>&v) {
      ConstData d(v.data(),v.size());
      pointer=d.pointer;
      size=d.size;
      type=d.type;
    }
    template <typename T> explicit ConstData(const Matrix<T>&m ) {
      if(m.nrows()*m.ncols()>0) {
        ConstData d(&m(0,0),m.nrows()*m.ncols());
        pointer=d.pointer;
        size=d.size;
        type=d.type;
      } else {
        pointer=NULL;
        size=0;
      }
    }
    explicit ConstData(const std::string&s) {
      if(s.size()>0) {
        ConstData d(&s[0],s.size());
        pointer=d.pointer;
        size=d.size;
        type=d.type;
      } else {
        pointer=NULL;
        size=0;
      }
    }
  };
public:
/// Runtime access to the __PLUMED_HAS_MPI definition
  static bool plumedHasMPI();

/// Wrapper class for MPI_Status
  class Status {
    int Get_count(MPI_Datatype)const;
  public:
    MPI_Status s;
    template <class T>
    int Get_count()const {
      return Get_count(getMPIType<T>());
    }
  };
/// Special status used when the status should be ignored.
/// E.g. `Recv(a,0,1,Communicator::StatusIgnore);`
/// Notice that this is the default for Recv, so this is equivalent to
/// `Recv(a,0,1);`
  static Status StatusIgnore;
/// Wrapper class for MPI_Request
  class Request {
  public:
    MPI_Request r;
    void wait(Status&s=StatusIgnore);
  };
/// Default constructor
  Communicator();
/// Copy constructor.
/// It effectively "clones" the communicator, providing a new one acting on the same group.
  Communicator(const Communicator&);
/// Assignment operator.
/// It effectively "clones" the communicator, providing a new one acting on the same group.
  Communicator& operator=(const Communicator&);
/// Destructor
  virtual ~Communicator();
/// Obtain the rank of the present process
  int Get_rank()const;
/// Obtain the number of processes
  int Get_size()const;
/// Set from a real MPI communicator.
/// \param comm MPI communicator
  void Set_comm(MPI_Comm comm);
/// Reference to the MPI communicator
  MPI_Comm & Get_comm();
/// Set from a pointer to a real MPI communicator (C).
/// \param comm Pointer to a C MPI communicator
  void Set_comm(const TypesafePtr & comm);
/// Set from a pointer to a real MPI communicator (FORTRAN).
/// \param comm Pointer to a FORTRAN MPI communicator (INTEGER)
  void Set_fcomm(const TypesafePtr & comm);
/// Wrapper to MPI_Abort.
/// \param code Error code
  void Abort(int code);
/// Wrapper to MPI_Barrier
  void Barrier()const;
/// Tests if the MPI library is initialized
  static bool initialized();
/// Wrapper for MPI_Allreduce with MPI_SUM (data struct)
  void Sum(Data);
/// Wrapper for MPI_Allreduce with MPI_SUM (pointer)
  template <class T> void Sum(T*buf,int count) {
    Sum(Data(buf,count));
  }
/// Wrapper for MPI_Allreduce with MPI_SUM (reference)
  template <class T> void Sum(T&buf) {
    Sum(Data(buf));
  }
/// Wrapper for MPI_Allreduce with MPI_PROD (data struct)
  void Prod(Data);
/// Wrapper for MPI_Allreduce with MPI_PROD (pointer)
  template <class T> void Prod(T*buf,int count) {
    Prod(Data(buf,count));
  }
/// Wrapper for MPI_Allreduce with MPI_PROD (reference)
  template <class T> void Prod(T&buf) {
    Prod(Data(buf));
  }
/// Wrapper for MPI_Allreduce with MPI_MAX (data struct)
  void Max(Data);
/// Wrapper for MPI_Allreduce with MPI_MAX (pointer)
  template <class T> void Max(T*buf,int count) {
    Max(Data(buf,count));
  }
/// Wrapper for MPI_Allreduce with MPI_MAX (reference)
  template <class T> void Max(T&buf) {
    Max(Data(buf));
  }
/// Wrapper for MPI_Allreduce with MPI_MIN (data struct)
  void Min(Data);
/// Wrapper for MPI_Allreduce with MPI_MIN (pointer)
  template <class T> void Min(T*buf,int count) {
    Min(Data(buf,count));
  }
/// Wrapper for MPI_Allreduce with MPI_MIN (reference)
  template <class T> void Min(T&buf) {
    Min(Data(buf));
  }
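/// Example of the reduction wrappers (a minimal sketch; `comm` is assumed to be
/// a Communicator already attached to a valid MPI communicator, and the buffer
/// sizes and values are hypothetical):
/// \verbatim
/// std::vector<double> partial(300,0.0); // per-rank partial sums
/// // ... each rank accumulates its own contribution into partial ...
/// comm.Sum(partial); // in-place allreduce with MPI_SUM; every rank gets the total
/// double t=0.0;      // stand-in for a per-rank scalar, e.g. a timing
/// comm.Max(t);       // after the call, every rank holds the maximum value
/// \endverbatim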

/// Wrapper for MPI_Bcast (data struct)
  void Bcast(Data,int);
/// Wrapper for MPI_Bcast (pointer)
  template <class T> void Bcast(T*buf,int count,int root) {
    Bcast(Data(buf,count),root);
  }
/// Wrapper for MPI_Bcast (reference)
  template <class T> void Bcast(T&buf,int root) {
    Bcast(Data(buf),root);
  }
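/// Example of the Bcast wrappers (a minimal sketch; `comm` is assumed valid and
/// the buffer must already have the same size on every rank):
/// \verbatim
/// std::vector<double> params(10);
/// if(comm.Get_rank()==0) params.assign(10,1.0); // stand-in for input read on rank 0
/// comm.Bcast(params,0);                         // broadcast rank 0's values to all ranks
/// \endverbatim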

/// Wrapper for MPI_Isend (data struct)
  Request Isend(ConstData,int,int);
/// Wrapper for MPI_Isend (pointer)
  template <class T> Request Isend(const T*buf,int count,int dest,int tag) {
    return Isend(ConstData(buf,count),dest,tag);
  }
/// Wrapper for MPI_Isend (reference)
  template <class T> Request Isend(const T&buf,int dest,int tag) {
    return Isend(ConstData(buf),dest,tag);
  }

/// Wrapper for MPI_Allgatherv (data struct)
  void Allgatherv(ConstData in,Data out,const int*,const int*);
/// Wrapper for MPI_Allgatherv (pointer)
  template <class T,class S> void Allgatherv(const T*sendbuf,int sendcount,S*recvbuf,const int*recvcounts,const int*displs) {
    Allgatherv(ConstData(sendbuf,sendcount),Data(recvbuf,0),recvcounts,displs);
  }
/// Wrapper for MPI_Allgatherv (reference)
  template <class T,class S> void Allgatherv(const T&sendbuf,S&recvbuf,const int*recvcounts,const int*displs) {
    Allgatherv(ConstData(sendbuf),Data(recvbuf),recvcounts,displs);
  }
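/// Example of the Allgatherv wrappers (a minimal sketch; `comm` is assumed
/// valid, and each rank contributes a rank-dependent number of elements):
/// \verbatim
/// int nloc=comm.Get_rank()+1; // hypothetical per-rank count
/// std::vector<int> counts(comm.Get_size()),displs(comm.Get_size());
/// comm.Allgather(nloc,counts); // share the counts first
/// for(unsigned i=1;i<displs.size();i++) displs[i]=displs[i-1]+counts[i-1];
/// std::vector<double> local(nloc,1.0),global(displs.back()+counts.back());
/// comm.Allgatherv(local,global,counts.data(),displs.data());
/// \endverbatim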

/// Wrapper for MPI_Allgather (data struct)
  void Allgather(ConstData in,Data out);
/// Wrapper for MPI_Allgather (pointer)
  template <class T,class S> void Allgather(const T*sendbuf,int sendcount,S*recvbuf,int recvcount) {
    Allgather(ConstData(sendbuf,sendcount),Data(recvbuf,recvcount*Get_size()));
  }
/// Wrapper for MPI_Allgather (reference)
  template <class T,class S> void Allgather(const T&sendbuf,S&recvbuf) {
    Allgather(ConstData(sendbuf),Data(recvbuf));
  }
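/// Example of the Allgather wrappers (a minimal sketch; `comm` is assumed
/// valid, and every rank contributes the same number of elements):
/// \verbatim
/// double local=comm.Get_rank();             // stand-in for a per-rank result
/// std::vector<double> all(comm.Get_size());
/// comm.Allgather(local,all); // all[i] holds rank i's value on every rank
/// \endverbatim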

/// Wrapper for MPI_Recv (data struct)
  void Recv(Data,int,int,Status&s=StatusIgnore);
/// Wrapper for MPI_Recv (pointer)
  template <class T> void Recv(T*buf,int count,int source,int tag,Status&s=StatusIgnore) {
    Recv(Data(buf,count),source,tag,s);
  }
/// Wrapper for MPI_Recv (reference)
  template <class T> void Recv(T&buf,int source,int tag,Status&s=StatusIgnore) {
    Recv(Data(buf),source,tag,s);
  }
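/// Example of the point-to-point wrappers (a minimal sketch; `comm` is assumed
/// valid with at least two ranks; the tag value 77 is arbitrary):
/// \verbatim
/// std::vector<double> buf(100);
/// if(comm.Get_rank()==0) {
///   Communicator::Request req=comm.Isend(buf,1,77); // non-blocking send to rank 1
///   req.wait();                                     // complete before reusing buf
/// } else if(comm.Get_rank()==1) {
///   Communicator::Status st;
///   comm.Recv(buf,0,77,st);                         // blocking receive from rank 0
///   int n=st.Get_count<double>();                   // elements actually received
/// }
/// \endverbatim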

/// Wrapper to MPI_Comm_split
  void Split(int,int,Communicator&)const;
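/// Example of Split (a minimal sketch; it assumes the two integer arguments map
/// to the color and key of MPI_Comm_split, in that order, so ranks with the
/// same color end up in the same sub-communicator, ordered by key):
/// \verbatim
/// Communicator sub;
/// comm.Split(comm.Get_rank()%2,comm.Get_rank(),sub); // split ranks by parity
/// \endverbatim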
};

}

#endif