OpenFPM  5.2.0
Project that contains the implementation of distributed structures
MPI_IsendW.hpp
1 #ifndef MPI_ISEND_HPP
2 #define MPI_ISEND_HPP
3 
4 
#include <cassert>

#include <mpi.h>
6 
17 {
18 public:
19  static inline void send(size_t proc , size_t tag ,const void * buf, size_t sz, MPI_Request & req, MPI_Comm ext_comm)
20  {
21  if (sz <= 2147483647)
22  {
23  MPI_Isend(buf, sz,MPI_BYTE, proc, tag , ext_comm, &req);
24  }
25  else
26  {
27  MPI_Isend(buf, sz >> 3 ,MPI_DOUBLE, proc, tag , ext_comm, &req);
28  }
29 
30  }
31 };
32 
39 template<typename T, typename Mem, template<typename> class gr> class MPI_IsendW
40 {
41 public:
42  static inline void send(size_t proc , size_t tag ,openfpm::vector<T,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
43  {
44  if (v.size() * sizeof(T) <= 2147483647)
45  {
46  MPI_Isend(v.getPointer(), v.size() * sizeof(T),MPI_BYTE, proc, tag , ext_comm,&req);
47  }
48  else
49  {
50  MPI_Isend(v.getPointer(), (v.size() * sizeof(T)) >> 3,MPI_DOUBLE, proc, tag , ext_comm,&req);
51  }
52  }
53 };
54 
55 
59 template<typename Mem, template<typename> class gr> class MPI_IsendW<int,Mem,gr>
60 {
61 public:
62  static inline void send(size_t proc , size_t tag ,openfpm::vector<int,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
63  {
64  MPI_Isend(v.getPointer(), v.size(),MPI_INT, proc, tag , ext_comm,&req);
65  }
66 };
67 
71 template<typename Mem, template<typename> class gr> class MPI_IsendW<unsigned int,Mem,gr>
72 {
73 public:
74  static inline void send(size_t proc , size_t tag ,openfpm::vector<unsigned int,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
75  {
76  MPI_Isend(v.getPointer(), v.size(),MPI_UNSIGNED, proc, tag , ext_comm,&req);
77  }
78 };
79 
83 template<typename Mem, template<typename> class gr> class MPI_IsendW<short,Mem,gr>
84 {
85 public:
86  static inline void send(size_t proc , size_t tag ,openfpm::vector<short,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
87  {
88  MPI_Isend(v.getPointer(), v.size(),MPI_SHORT, proc, tag , ext_comm,&req);
89  }
90 };
91 
95 template<typename Mem, template<typename> class gr> class MPI_IsendW<unsigned short,Mem,gr>
96 {
97 public:
98  static inline void send(size_t proc , size_t tag ,openfpm::vector<unsigned short,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
99  {
100  MPI_Isend(v.getPointer(), v.size(),MPI_UNSIGNED_SHORT, proc, tag , ext_comm,&req);
101  }
102 };
103 
107 template<typename Mem, template<typename> class gr> class MPI_IsendW<char,Mem,gr>
108 {
109 public:
110  static inline void send(size_t proc , size_t tag ,openfpm::vector<char,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
111  {
112  MPI_Isend(v.getPointer(), v.size(),MPI_CHAR, proc, tag , ext_comm,&req);
113  }
114 };
115 
119 template<typename Mem, template<typename> class gr> class MPI_IsendW<unsigned char,Mem,gr>
120 {
121 public:
122  static inline void send(size_t proc , size_t tag ,openfpm::vector<unsigned char,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
123  {
124  MPI_Isend(v.getPointer(), v.size(),MPI_UNSIGNED_CHAR, proc, tag , ext_comm,&req);
125  }
126 };
127 
131 template<typename Mem, template<typename> class gr> class MPI_IsendW<size_t,Mem,gr>
132 {
133 public:
134  static inline void send(size_t proc , size_t tag ,openfpm::vector<size_t,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
135  {
136  MPI_Isend(v.getPointer(), v.size(),MPI_UNSIGNED_LONG, proc, tag , ext_comm,&req);
137  }
138 };
139 
143 template<typename Mem, template<typename> class gr> class MPI_IsendW<long int,Mem,gr>
144 {
145 public:
146  static inline void send(size_t proc , size_t tag ,openfpm::vector<long int,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
147  {
148  MPI_Isend(v.getPointer(), v.size(),MPI_LONG, proc, tag , ext_comm,&req);
149  }
150 };
151 
155 template<typename Mem, template<typename> class gr> class MPI_IsendW<float,Mem,gr>
156 {
157 public:
158  static inline void send(size_t proc , size_t tag ,openfpm::vector<float,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
159  {
160  MPI_Isend(v.getPointer(), v.size(),MPI_FLOAT, proc, tag , ext_comm,&req);
161  }
162 };
163 
167 template<typename Mem, template<typename> class gr> class MPI_IsendW<double,Mem,gr>
168 {
169 public:
170  static inline void send(size_t proc , size_t tag ,openfpm::vector<double,Mem,gr> & v, MPI_Request & req, MPI_Comm ext_comm)
171  {
172  MPI_Isend(v.getPointer(), v.size(),MPI_DOUBLE, proc, tag , ext_comm,&req);
173  }
174 };
175 
176 #endif
Set of wrapping classes for MPI_Isend.
Definition: MPI_IsendW.hpp:17
General send for a vector of any type.
Definition: MPI_IsendW.hpp:40
Implementation of a 1-D std::vector-like structure.
Definition: map_vector.hpp:204
KeyT const ValueT ValueT OffsetIteratorT OffsetIteratorT int
[in] The number of segments that comprise the sorting data