OpenFPM 5.2.0
Project that contains the implementation of distributed structures
MPI_IBcastW.hpp
/*
 * MPI_IBcastW.hpp
 *
 *  Created on: Apr 8, 2017
 *      Author: i-bird
 *      Modified by: Abhinav Singh
 */

#ifndef OPENFPM_VCLUSTER_SRC_MPI_WRAPPER_MPI_IBCASTW_HPP_
#define OPENFPM_VCLUSTER_SRC_MPI_WRAPPER_MPI_IBCASTW_HPP_

#include <mpi.h>

/*! \brief Set of wrapping classes for MPI_Ibcast
 *
 * MPI_IBcastWB broadcasts a raw byte buffer.
 */
class MPI_IBcastWB
{
public:
    static inline void bcast(size_t proc, void * buf, size_t sz, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(buf,sz,MPI_BYTE,proc,ext_comm,&req));
    }
};

/*! \brief General broadcast for a vector of any type T (sent as raw bytes)
 *
 */
template<typename T> class MPI_IBcastW
{
public:
    template<typename Memory> static inline void bcast(size_t proc, openfpm::vector<T, Memory> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size() * sizeof(T), MPI_BYTE, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of int
template<> class MPI_IBcastW<int>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<int> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_INT, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of unsigned int
template<> class MPI_IBcastW<unsigned int>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<unsigned int> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_UNSIGNED, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of short
template<> class MPI_IBcastW<short>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<short> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_SHORT, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of unsigned short
template<> class MPI_IBcastW<unsigned short>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<unsigned short> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_UNSIGNED_SHORT, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of char
template<> class MPI_IBcastW<char>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<char> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_CHAR, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of unsigned char
template<> class MPI_IBcastW<unsigned char>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<unsigned char> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_UNSIGNED_CHAR, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of size_t
template<> class MPI_IBcastW<size_t>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<size_t> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_UNSIGNED_LONG, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of long int
template<> class MPI_IBcastW<long int>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<long int> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_LONG, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of float
template<> class MPI_IBcastW<float>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<float> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_FLOAT, proc, ext_comm, &req));
    }
};

//! specialization to broadcast a vector of double
template<> class MPI_IBcastW<double>
{
public:
    static inline void bcast(size_t proc, openfpm::vector<double> & v, MPI_Request & req, MPI_Comm ext_comm)
    {
        MPI_SAFE_CALL(MPI_Ibcast(v.getPointer(), v.size(), MPI_DOUBLE, proc, ext_comm, &req));
    }
};

/*! \brief this class is a functor for "for_each" algorithm
 *
 * It broadcasts each property of an aggregate-based vector separately.
 */
template<typename vect>
struct bcast_inte_impl
{
    //! External Communicator
    MPI_Comm ext_comm;

    //! vector to broadcast
    vect & send;

    //! vector of requests
    openfpm::vector<MPI_Request> & req;

    //! root processor
    size_t root;

    //! constructor
    inline bcast_inte_impl(vect & send,
                           openfpm::vector<MPI_Request> & req,
                           size_t root, MPI_Comm ext_comm=MPI_COMM_WORLD)
    :ext_comm(ext_comm),send(send),req(req),root(root)
    {};

    //! It calls the broadcast for each property
    template<typename T>
    inline void operator()(T& t)
    {
        typedef typename boost::mpl::at<typename vect::value_type::type,T>::type send_type;

        // Create one request
        req.add();

        // broadcast this property as raw bytes
        MPI_IBcastWB::bcast(root,&send.template get<T::value>(0),send.size()*sizeof(send_type),req.last(), ext_comm);
    }
};

//! helper that dispatches the broadcast: the primary template handles
//! vectors whose elements can be broadcast with a single MPI_Ibcast
template<bool is_lin_or_inte>
struct b_cast_helper
{
    template<typename T, typename Mem, template<typename> class layout_base >
    static void bcast_(openfpm::vector<MPI_Request> & req,
                       openfpm::vector<T,Mem,layout_base> & v,
                       size_t root, MPI_Comm ext_comm)
    {
        // Create one request
        req.add();

        // broadcast the whole vector with one call
        MPI_IBcastW<T>::bcast(root,v,req.last(), ext_comm);
    }
};

//! specialization for non-linear (aggregate) layouts: every property is
//! broadcast with its own request through bcast_inte_impl
template<>
struct b_cast_helper<false>
{
    template<typename T, typename Mem, template<typename> class layout_base >
    static void bcast_(openfpm::vector<MPI_Request> & req,
                       openfpm::vector<T,Mem,layout_base> & v,
                       size_t root, MPI_Comm ext_comm)
    {
        bcast_inte_impl<openfpm::vector<T,Mem,layout_base>> bc(v,req,root,ext_comm);

        boost::mpl::for_each_ref<boost::mpl::range_c<int,0,T::max_prop>>(bc);
    }
};

#endif /* OPENFPM_VCLUSTER_SRC_MPI_WRAPPER_MPI_IBCASTW_HPP_ */
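
For context, a minimal usage sketch (not part of the header itself): the wrappers only start a non-blocking broadcast, so the MPI_Request they fill must be completed before the data is read. The include path for openfpm::vector and the surrounding setup are assumptions; MPI (or the OpenFPM VCluster) must already be initialized and the vector must have the same size on every rank.

#include <mpi.h>
#include "MPI_IBcastW.hpp"         // this header
#include "Vector/map_vector.hpp"   // openfpm::vector (include path is an assumption)

void broadcast_example(openfpm::vector<float> & v)
{
    MPI_Request req;

    // start the broadcast from rank 0 over MPI_COMM_WORLD
    MPI_IBcastW<float>::bcast(0, v, req, MPI_COMM_WORLD);

    // the call is non-blocking: wait for completion before reading v
    MPI_Wait(&req, MPI_STATUS_IGNORE);
}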