OpenFPM  5.2.0
Project that contains the implementation of distributed structures
SparseMatrix_petsc.hpp
/*
 * SparseMatrix_petsc.hpp
 *
 *  Created on: Apr 26, 2016
 *      Author: i-bird
 */

#ifndef OPENFPM_NUMERICS_SRC_MATRIX_SPARSEMATRIX_PETSC_HPP_
#define OPENFPM_NUMERICS_SRC_MATRIX_SPARSEMATRIX_PETSC_HPP_

#include "util/petsc_util.hpp"
#include "Vector/map_vector.hpp"
#include <boost/mpl/int.hpp>
#include <petscmat.h>
#include "VTKWriter/VTKWriter.hpp"
#include "CSVWriter/CSVWriter.hpp"

#define PETSC_BASE 2

/*! \brief It stores one non-zero element of the sparse matrix
 *
 */
template<typename T>
class triplet<T,PETSC_BASE>
{
	//! Row of the sparse matrix
	PetscInt row_;

	//! Column of the sparse matrix
	PetscInt col_;

	//! Value of the matrix
	PetscScalar val_;

public:

	//! Return the row of the triplet
	PetscInt & row()
	{
		return row_;
	}

	//! Return the column of the triplet
	PetscInt & col()
	{
		return col_;
	}

	//! Return the value of the triplet
	PetscScalar & value()
	{
		return val_;
	}

	//! Constructor from row, column and value
	triplet(long int i, long int j, T val)
	{
		row_ = i;
		col_ = j;
		val_ = val;
	}

	//! Default constructor
	triplet()
	:row_(0),col_(0),val_(0)
	{};
};

/*! \brief Sparse Matrix implementation that maps over the PETSc Mat format
 *
 */
template<typename T, typename id_t>
class SparseMatrix<T,id_t, PETSC_BASE>
{
public:

	//! Triplet implementation id
	typedef boost::mpl::int_<PETSC_BASE> triplet_impl;

	//! Triplet type
	typedef triplet<T,PETSC_BASE> triplet_type;

private:

	//! Number of matrix rows (global)
	size_t g_row;
	//! Number of matrix columns (global)
	size_t g_col;

	//! Number of matrix rows (local)
	size_t l_row;
	//! Number of matrix columns (local)
	size_t l_col;

	//! Starting row for this processor
	size_t start_row;

	//! Indicate whether the PETSc matrix has been filled and assembled
	bool m_created = false;

	//! PETSc Matrix
	Mat mat;

	//! Triplets of the matrix
	openfpm::vector<triplet_type> trpl;

	//! PETSc d_nnz (non-zeros per row in the diagonal block)
	openfpm::vector<PetscInt> d_nnz;
	//! PETSc o_nnz (non-zeros per row in the off-diagonal block)
	openfpm::vector<PetscInt> o_nnz;

	//! Temporary list of values
	openfpm::vector<PetscScalar> vals;
	//! Temporary list of columns
	openfpm::vector<PetscInt> cols;

	/*! \brief Fill the PETSc matrix from the triplets
	 *
	 */
	void fill_petsc()
	{
		d_nnz.resize(l_row);
		o_nnz.resize(l_row);

		d_nnz.fill(0);
		o_nnz.fill(0);

		// Here we explore every row to count how many non-zeros fall in the diagonal matrix part
		// and how many in the off-diagonal matrix part, as needed by MatMPIAIJSetPreallocation
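		// For the MATMPIAIJ format the "diagonal" part of a row is the square block of
		// columns owned by this processor; since local rows and columns coincide here,
		// that is the column range [start_row, start_row + l_row). Every other column of
		// the row is counted in the off-diagonal part (o_nnz).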

		size_t i = 0;

		// Count, for each row, the non-zeros in the diagonal and off-diagonal parts
		while (i < trpl.size())
		{
			PetscInt row = trpl.get(i).row();

			while(i < trpl.size() && row == trpl.get(i).row())
			{
				if ((size_t)trpl.get(i).col() >= start_row && (size_t)trpl.get(i).col() < start_row + l_row)
					d_nnz.get(row - start_row)++;
				else
					o_nnz.get(row - start_row)++;
				i++;
			}
		}

		PETSC_SAFE_CALL(MatMPIAIJSetPreallocation(mat,0,static_cast<const PetscInt*>(d_nnz.getPointer()),0,
													  static_cast<const PetscInt*>(o_nnz.getPointer())));

		// Reset the counter
		i = 0;

		// Set the Matrix from the triplets, one row at a time
		while (i < trpl.size())
		{
			vals.clear();
			cols.clear();

			PetscInt row = trpl.get(i).row();

			while(i < trpl.size() && row == trpl.get(i).row())
			{
				vals.add(trpl.get(i).value());
				cols.add(trpl.get(i).col());
				i++;
			}
			PETSC_SAFE_CALL(MatSetValues(mat,1,&row,cols.size(),static_cast<const PetscInt*>(cols.getPointer()),
														static_cast<const PetscScalar *>(vals.getPointer()),
														INSERT_VALUES));
		}

		PETSC_SAFE_CALL(MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY));
		PETSC_SAFE_CALL(MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY));

		m_created = true;
	}

	/*! \brief Disable copy constructor
	 *
	 */
	SparseMatrix(const SparseMatrix<T,id_t,PETSC_BASE> & spm);

public:

	/*! \brief Create an empty Matrix
	 *
	 * \param N1 number of rows (global)
	 * \param N2 number of columns (global)
	 * \param n_row_local number of rows owned by this processor
	 *
	 */
	SparseMatrix(size_t N1, size_t N2, size_t n_row_local)
	:g_row(N1),g_col(N2),l_row(n_row_local),l_col(n_row_local)
	{
		PETSC_SAFE_CALL(MatCreate(PETSC_COMM_WORLD,&mat));
		PETSC_SAFE_CALL(MatSetType(mat,MATMPIAIJ));
		PETSC_SAFE_CALL(MatSetFromOptions(mat));
		PETSC_SAFE_CALL(MatSetSizes(mat,n_row_local,n_row_local,N1,N2));

		Vcluster<> & v_cl = create_vcluster();

		openfpm::vector<size_t> vn_row_local;
		v_cl.allGather(l_row,vn_row_local);
		v_cl.execute();

		// Calculate the starting row for this processor
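		// start_row is the sum of the local row counts of all lower-ranked processors.
		// Illustrative example: with three processors owning 4, 5 and 3 rows, processor 0
		// starts at global row 0, processor 1 at row 4 and processor 2 at row 9.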
		start_row = 0;
		for (size_t i = 0 ; i < v_cl.getProcessUnitID() ; i++)
			start_row += vn_row_local.get(i);
	}

	/*! \brief Create an empty Matrix
	 *
	 */
	SparseMatrix()
	:g_row(0),g_col(0),l_row(0),l_col(0),start_row(0)
	{
		PETSC_SAFE_CALL(MatCreate(PETSC_COMM_WORLD,&mat));
		PETSC_SAFE_CALL(MatSetType(mat,MATMPIAIJ));
		PETSC_SAFE_CALL(MatSetFromOptions(mat));
	}

	~SparseMatrix()
	{
		// Destroy the matrix
		if (is_openfpm_init() == true)
		{PETSC_SAFE_CALL(MatDestroy(&mat));}
	}

	/*! \brief Get the Matrix triplets buffer
	 *
	 * \return the buffer of triplets
	 *
	 */
	openfpm::vector<triplet_type> & getMatrixTriplets()
	{
		m_created = false;

		return this->trpl;
	}

	/*! \brief Get the PETSc Matrix object
	 *
	 * \return the PETSc matrix
	 *
	 */
	const Mat & getMat() const
	{
		if (m_created == false)
		{fill_petsc();}

		return mat;
	}

	/*! \brief Get the PETSc Matrix object
	 *
	 * \return the PETSc matrix
	 *
	 */
	Mat & getMat()
	{
		if (m_created == false)
		{fill_petsc();}

		return mat;
	}

	/*! \brief Resize the Sparse Matrix
	 *
	 * \param row number of rows (global)
	 * \param col number of columns (global)
	 * \param l_row number of rows (local)
	 * \param l_col number of columns (local)
	 *
	 */
	void resize(size_t row, size_t col, size_t l_row, size_t l_col)
	{
		if ((g_row != 0 && g_row != row) || (g_col != 0 && g_col != col) ||
			(this->l_row != 0 && this->l_row != l_row) || (this->l_col != 0 && this->l_col != l_col))
		{
			std::cout << __FILE__ << ":" << __LINE__ << " error: you are resizing a PETSc matrix " << std::endl;
		}

		if (g_row != 0 && g_col != 0)	{return;}

		this->g_row = row;
		this->g_col = col;

		this->l_row = l_row;
		this->l_col = l_col;

		PETSC_SAFE_CALL(MatSetSizes(mat,l_row,l_col,g_row,g_col));

		Vcluster<> & v_cl = create_vcluster();

		openfpm::vector<size_t> vn_row_local;
		v_cl.allGather(l_row,vn_row_local);
		v_cl.execute();

		// Calculate the starting row for this processor

		start_row = 0;
		for (size_t i = 0 ; i < v_cl.getProcessUnitID() ; i++)
			start_row += vn_row_local.get(i);
	}

	/*! \brief Get the row i and the column j of the Matrix
	 *
	 * \return the value at row i and column j
	 *
	 */
	T operator()(id_t i, id_t j)
	{
		T v;

		MatGetValues(mat,1,&i,1,&j,&v);

		return v;
	}

	/*! \brief Get the value from the triplets
	 *
	 * \param r row
	 * \param c column
	 *
	 * \return the value, or 0 if the element is not present
	 *
	 */
	T getValue(size_t r, size_t c)
	{
		for (size_t i = 0 ; i < trpl.size() ; i++)
		{
			if (r == (size_t)trpl.get(i).row() && c == (size_t)trpl.get(i).col())
				return trpl.get(i).value();
		}

		return 0;
	}

	/*! \brief Return the state of the matrix
	 *
	 * \return true if the PETSc matrix has been filled and assembled
	 *
	 */
	bool isMatrixFilled()
	{
		return m_created;
	}

	/*! \brief Write the matrix to file (VTK or CSV)
	 *
	 * \param out file to write into
	 * \param opt VTK_WRITER (default); any other value selects the CSV writer
	 *
	 * \return true if the write succeeded
	 *
	 */
	bool write(std::string out,size_t opt = VTK_WRITER)
	{
		Vcluster<> & v_cl = create_vcluster();

		// (row,col) positions and values of the non-zero entries (scalar type assumed PetscScalar)
		openfpm::vector<Point<2,PetscScalar>> row_col;
		openfpm::vector<aggregate<PetscScalar>> values;

		row_col.resize(trpl.size());
		values.resize(trpl.size());

		for (size_t i = 0 ; i < trpl.size() ; i++)
		{
			row_col.template get<0>(i)[1] = trpl.get(i).row();
			row_col.template get<0>(i)[0] = trpl.get(i).col();

			values.template get<0>(i) = trpl.get(i).value();
		}

		if (opt == VTK_WRITER)
		{
			auto ft = file_type::BINARY;

			// VTKWriter for a set of points
			VTKWriter<boost::mpl::pair<openfpm::vector<Point<2,PetscScalar>>,
									   openfpm::vector<aggregate<PetscScalar>>>,
									   VECTOR_POINTS> vtk_writer;

			vtk_writer.add(row_col,values,row_col.size());

			std::string output = out + "_" + std::to_string(v_cl.getProcessUnitID()) + ".vtk";

			openfpm::vector<std::string> prp_names;
			prp_names.add("value");

			// Write the VTK file
			return vtk_writer.write(output,prp_names,"matrix","",ft);
		}
		else
		{
			// CSV writer for the matrix entries
			CSVWriter<openfpm::vector<Point<2,PetscScalar>>,
					  openfpm::vector<aggregate<PetscScalar>>> csv_writer;

			std::string output = out + "_" + std::to_string(v_cl.getProcessUnitID()) + ".csv";

			// Write the CSV file
			return csv_writer.write(output,row_col,values);
		}
	}
};


#endif /* OPENFPM_NUMERICS_SRC_MATRIX_SPARSEMATRIX_PETSC_HPP_ */