31 #ifndef MPICONTAINER_H_
32 #define MPICONTAINER_H_
39 #ifdef MERCURYDPM_USE_MPI
43 #ifdef MERCURYDPM_FORCE_ASSERTS
44 #define MERCURYDPM_ASSERTS true
46 #ifdef MERCURYDPM_NO_ASSERTS
47 #define MERCURYDPM_ASSERTS false
50 #define MERCURYDPM_ASSERTS false
52 #define MERCURYDPM_ASSERTS true
92 #ifdef MERCURYDPM_USE_MPI
95 typename std::enable_if<std::is_integral<T>::value, MPI_Datatype>::type
99 MPI_Type_match_size(MPI_TYPECLASS_INTEGER,
sizeof(T), &type);
105 typename std::enable_if<std::is_floating_point<T>::value, MPI_Datatype>::type
109 MPI_Type_match_size(MPI_TYPECLASS_REAL,
sizeof(T),&type);
154 #ifdef MERCURYDPM_USE_MPI
155 MPI_Waitall(pending_.size(),pending_.data(),MPI_STATUSES_IGNORE);
157 MPI_Barrier(communicator_);
170 typename std::enable_if<std::is_scalar<T>::value,
void>::type
173 #if MERCURYDPM_ASSERTS
176 logger(
FATAL,
"[MPI FATAL]: Sending data to self!");
179 #ifdef MERCURYDPM_USE_MPI
181 MPI_Isend(&t, 1, Detail::toMPIType(t), to, tag, communicator_, &request);
182 pending_.push_back(request);
189 typename std::enable_if<std::is_scalar<T>::value,
void>::type
190 send(T* t,
int count,
int to,
int tag)
192 #if MERCURYDPM_ASSERTS
195 logger(
FATAL,
"[MPI FATAL]: Sending data to self!");
200 logger(
WARN,
"[MPI ERROR]: Sending zero data");
203 #ifdef MERCURYDPM_USE_MPI
205 MPI_Isend(t, count, Detail::toMPIType(*t), to, tag, communicator_, &request);
206 pending_.push_back(request);
220 typename std::enable_if<std::is_scalar<T>::value,
void>::type
223 #if MERCURYDPM_ASSERTS
226 logger(
FATAL,
"[MPI FATAL]: Receiving data from self!");
229 #ifdef MERCURYDPM_USE_MPI
231 MPI_Irecv(&t, 1, Detail::toMPIType(t), from, tag, communicator_, &request);
232 pending_.push_back(request);
239 typename std::enable_if<std::is_scalar<T>::value,
void>::type
242 #if MERCURYDPM_ASSERTS
245 logger(
FATAL,
"[MPI FATAL]: Receiving data fromself!");
250 logger(
WARN,
"[MPI ERROR]: Receiving zero data");
253 #ifdef MERCURYDPM_USE_MPI
255 MPI_Irecv(&t, count, Detail::toMPIType(*t), from, tag, communicator_, &request);
256 pending_.push_back(request);
274 #if MERCURYDPM_ASSERTS
277 logger(
FATAL,
"[MPI FATAL]: Sending data to self!");
282 logger(
WARN,
"[MPI ERROR]: Sending zero data");
285 #ifdef MERCURYDPM_USE_MPI
287 MPI_Isend(t, count, dataTypes_[type], to, tag, communicator_, &request);
288 pending_.push_back(request);
306 #if MERCURYDPM_ASSERTS
309 logger(
FATAL,
"[MPI FATAL]: Receiving data to self!");
314 logger(
WARN,
"[MPI ERROR]: Receiving zero data");
317 #ifdef MERCURYDPM_USE_MPI
319 MPI_Irecv(t, count, dataTypes_[type], from, tag, communicator_, &request);
320 pending_.push_back(request);
334 typename std::enable_if<std::is_scalar<T>::value,
void>::type
337 #if MERCURYDPM_ASSERTS
340 logger(
FATAL,
"[MPI FATAL]: Sending data to self!");
345 logger(
WARN,
"[MPI ERROR]: Sending zero data");
348 #ifdef MERCURYDPM_USE_MPI
349 MPI_Ssend(&t, count, Detail::toMPIType(t), to, tag, communicator_);
357 #if MERCURYDPM_ASSERTS
360 logger(
FATAL,
"[MPI FATAL]: Sending data to self!");
365 logger(
WARN,
"[MPI ERROR]: Sending zero data");
368 #ifdef MERCURYDPM_USE_MPI
369 MPI_Ssend(t,count,dataTypes_[type], to, tag, communicator_);
382 typename std::enable_if<std::is_scalar<T>::value,
void>::type
385 #if MERCURYDPM_ASSERTS
388 logger(
FATAL,
"[MPI FATAL]: Receiving data from self!");
393 logger(
WARN,
"[MPI ERROR]: Receiving zero data");
396 #ifdef MERCURYDPM_USE_MPI
397 MPI_Recv(&t, count, Detail::toMPIType(t), from, tag,communicator_, MPI_STATUS_IGNORE);
404 #if MERCURYDPM_ASSERTS
407 logger(
FATAL,
"[MPI FATAL]: Receiving data to self!");
412 logger(
WARN,
"[MPI ERROR]: Receiving zero data");
415 #ifdef MERCURYDPM_USE_MPI
416 MPI_Recv(t, count, dataTypes_[type], from, tag, communicator_, MPI_STATUS_IGNORE);
430 #ifdef MERCURYDPM_USE_MPI
431 MPI_Gather(&send_t, 1, Detail::toMPIType(send_t), receive_t, 1, Detail::toMPIType(send_t), 0, communicator_);
440 typename std::enable_if<std::is_scalar<T>::value,
void>::type
443 #ifdef MERCURYDPM_USE_MPI
444 MPI_Bcast(&t,1,Detail::toMPIType(t),fromProcessor,communicator_);
453 typename std::enable_if<std::is_scalar<T>::value,
void>::type
456 #ifdef MERCURYDPM_USE_MPI
457 MPI_Bcast((
void *)t,size,Detail::toMPIType(t[0]),fromProcessor,communicator_);
469 #ifdef MERCURYDPM_USE_MPI
470 MPI_Bcast((
void *)t,1,dataTypes_[type],fromProcessor,communicator_);
484 #ifdef MERCURYDPM_USE_MPI
486 typename std::enable_if<std::is_scalar<T>::value,
void>::type
487 reduce(T& t, MPI_Op operation,
int id = 0)
492 MPI_Reduce(MPI_IN_PLACE, &t, 1, Detail::toMPIType(t), operation,
id, communicator_);
496 MPI_Reduce(&t,
nullptr, 1, Detail::toMPIType(t), operation,
id, communicator_);
509 #ifdef MERCURYDPM_USE_MPI
511 typename std::enable_if<std::is_scalar<T>::value,
void>::type
512 allReduce(T& send_t, T& receive_t, MPI_Op operation)
514 MPI_Allreduce(&send_t, &receive_t, 1, Detail::toMPIType(send_t), operation,communicator_);
527 #ifdef MERCURYDPM_USE_MPI
529 typename std::enable_if<std::is_scalar<T>::value,
void>::type
530 allGather(T& send_t,
int send_count, std::vector<T>& receive_t,
int receive_count)
532 MPI_Allgather(&send_t, send_count, Detail::toMPIType(send_t),
533 receive_t.data(), receive_count, Detail::toMPIType(receive_t[0]),communicator_);
550 #ifdef MERCURYDPM_USE_MPI
566 #ifdef MERCURYDPM_USE_MPI
567 MPI_Datatype MPIType;
568 MPI_Type_contiguous(
sizeof(T), MPI_BYTE, &MPIType);
569 MPI_Type_commit(&MPIType);
570 dataTypes_.push_back(MPIType);
580 #ifdef MERCURYDPM_USE_MPI
581 for(MPI_Datatype type : dataTypes_)
583 MPI_Type_free(&type);
611 #ifdef MERCURYDPM_USE_MPI
615 std::vector<MPI_Request> pending_;
620 MPI_Comm communicator_;
624 std::vector<MPI_Datatype> dataTypes_;
Logger< MERCURYDPM_LOGLEVEL > logger("MercuryKernel")
Definitions of the different loggers used by certain modules. A user can define their own custom logger here.
MercuryMPITag
An enum that facilitates the creation of unique communication tags in the parallel code.
Definition: MpiContainer.h:77
@ SUPERQUADRIC_DATA
Definition: MpiContainer.h:87
@ INTERACTION_DATA
Definition: MpiContainer.h:84
@ VELOCITY_DATA
Definition: MpiContainer.h:82
@ INTERACTION_COUNT
Definition: MpiContainer.h:83
@ PARTICLE_INDEX
Definition: MpiContainer.h:86
@ PERIODIC_POSITION_DATA
Definition: MpiContainer.h:81
@ PARTICLE_COUNT
Definition: MpiContainer.h:78
@ POSITION_DATA
Definition: MpiContainer.h:80
@ PARTICLE_DATA
Definition: MpiContainer.h:79
@ PERIODIC_COMPLEXITY
Definition: MpiContainer.h:85
void initialiseMPI()
Initialises the MPI library.
Definition: MpiContainer.cc:137
MercuryMPIType
An enum that indicates what type of data is being sent over MPI.
Definition: MpiContainer.h:66
@ VELOCITY
Definition: MpiContainer.h:67
@ FORCE
Definition: MpiContainer.h:67
@ SUPERQUADRIC
Definition: MpiContainer.h:67
@ INTERACTION
Definition: MpiContainer.h:67
@ PARTICLE
Definition: MpiContainer.h:67
@ POSITION
Definition: MpiContainer.h:67
This class contains all information and functions required for communication between processors.
Definition: MpiContainer.h:130
void deleteMercuryMPITypes()
Deletes the MercuryMPITypes.
Definition: MpiContainer.h:578
std::enable_if< std::is_scalar< T >::value, void >::type broadcast(T &t, int fromProcessor=0)
Broadcasts a scalar from the root to all other processors.
Definition: MpiContainer.h:441
std::size_t getNumberOfProcessors() const
Get the total number of processors participating in this simulation.
Definition: MpiContainer.cc:104
std::enable_if< std::is_scalar< T >::value, void >::type receive(T &t, int from, int tag)
asynchronously receive a scalar from some other processor.
Definition: MpiContainer.h:221
void receive(T *t, MercuryMPIType type, int count, int from, int tag)
asynchronously receive a list of MercuryMPIType objects from some other processor.
Definition: MpiContainer.h:303
int numberOfProcessors_
The total number of processors in the communicator.
Definition: MpiContainer.h:609
void initialiseMercuryMPITypes(const SpeciesHandler &speciesHandler)
Creates the MPI types required for communication of Mercury data through the MPI interface.
Definition: MpiContainer.cc:74
std::enable_if< std::is_scalar< T >::value, void >::type send(T *t, int count, int to, int tag)
Definition: MpiContainer.h:190
std::enable_if< std::is_scalar< T >::value, void >::type directReceive(T &t, int count, int from, int tag)
synchronously receive a list of scalars from another processor. if the send command has not been issu...
Definition: MpiContainer.h:383
void directSend(T *t, MercuryMPIType type, int count, int to, int tag)
Definition: MpiContainer.h:355
void gather(T &send_t, T *receive_t)
Gathers a scalar from all processors to a vector of scalars on the root.
Definition: MpiContainer.h:428
void send(T *t, MercuryMPIType type, int count, int to, int tag)
asynchronously send a list of MercuryMPIType objects to some other processor.
Definition: MpiContainer.h:271
int processorID_
The ID of the processor this class is running on.
Definition: MpiContainer.h:604
static MPIContainer & Instance()
fetch the instance to be used for communication
Definition: MpiContainer.h:134
MPIContainer()
Constructor.
Definition: MpiContainer.cc:43
std::enable_if< std::is_scalar< T >::value, void >::type broadcast(T *t, int size, int fromProcessor)
Broadcasts an array of scalars from the root to all other processors.
Definition: MpiContainer.h:454
void sync()
Process all pending asynchronous communication requests before continuing.
Definition: MpiContainer.h:152
std::enable_if< std::is_scalar< T >::value, void >::type directSend(T &t, int count, int to, int tag)
synchronously send a list of scalars to another processor. the data should be received directly or th...
Definition: MpiContainer.h:335
void directReceive(T *t, MercuryMPIType type, int count, int from, int tag)
Definition: MpiContainer.h:402
void createMercuryMPIType(T t, MercuryMPIType type)
Creates an MPI data type for communicating objects of type T.
Definition: MpiContainer.h:564
void broadcast(T *t, MercuryMPIType type, int fromProcessor=0)
Broadcasts an MercuryMPIType to all other processors.
Definition: MpiContainer.h:467
std::size_t getProcessorID()
Get the ID of the processor this class is running on.
Definition: MpiContainer.cc:113
std::enable_if< std::is_scalar< T >::value, void >::type send(T &t, int to, int tag)
Asynchronously send a scalar to some other processor.
Definition: MpiContainer.h:171
std::enable_if< std::is_scalar< T >::value, void >::type receive(T *t, int count, int from, int tag)
Definition: MpiContainer.h:240
MPIContainer(const MPIContainer &orig)=delete
Copy constructor is disabled, to enforce a singleton pattern.
Container to store all ParticleSpecies.
Definition: SpeciesHandler.h:37
Definition: MpiContainer.h:91