CppNoddy  0.92
CppNoddy::SparseLinearSystem< _Type > Class Template Reference

A linear system class for vector right-hand sides. More...

#include <SparseLinearSystem.h>

Public Member Functions

 SparseLinearSystem (SparseMatrix< _Type > *Aptr, DenseVector< _Type > *Bptr, std::string which="native")
 Constructor for a sparse linear system object. More...
 
 ~SparseLinearSystem ()
 Destructor for a linear system object. More...
 
void cleanup ()
 Deallocates some objects. More...
 
void solve ()
 Solve the sparse system. More...
 
void factorise ()
 Factorise the Ax=B system. More...
 
void solve_using_factorisation ()
 Resolve the same system using the same factorisation. More...
 
void temp_solve ()
 

Detailed Description

template<typename _Type>
class CppNoddy::SparseLinearSystem< _Type >

A linear system class for vector right-hand sides.

The class is constructed for SPARSE problems of the form

\[ A_{N\times N} \, \underline{x} = \underline{B}_{N\times 1}. \]

Definition at line 20 of file SparseLinearSystem.h.

Constructor & Destructor Documentation

◆ SparseLinearSystem()

template<typename _Type >
CppNoddy::SparseLinearSystem< _Type >::SparseLinearSystem ( SparseMatrix< _Type > *  Aptr,
DenseVector< _Type > *  Bptr,
std::string  which = "native" 
)

Constructor for a sparse linear system object.

Parameters
    Aptr	A pointer to the 'A matrix', an NxN double/complex sparse matrix
    Bptr	A pointer to the 'B vector', a size-N double/complex dense vector
    which	A string that indicates which solver to use: 'native' (default) or 'petsc'. Note that, as the code below shows, only 'petsc' is currently accepted; any other value throws an ExceptionRuntime.

Definition at line 21 of file SparseLinearSystem.cpp.

: m_factorised(false) {
  m_pA = Aptr;
  m_pB = Bptr;
  m_version = which;
  //
  if (m_version != "petsc") {
    std::string problem;
    problem = "The SparseLinearSystem has been instantiated with an unrecognised\n";
    problem += "request for a solver type. Options: 'petsc'. \n";
    throw ExceptionRuntime(problem);
  }
  //
  if (m_version == "petsc") {
#if !defined(PETSC_D) && !defined(PETSC_Z)
    std::string problem;
    problem = "The SparseLinearSystem has been instantiated for a petsc solver.\n";
    problem += "HOWEVER, CppNoddy was not compiled with PETSC_D or PETSC_Z defined.\n";
    throw ExceptionRuntime(problem);
#endif
  }
  //
#if defined(PETSC_D) || defined(PETSC_Z)
  if (m_version == "petsc") {
    int flag(0);
    MPI_Initialized(&flag);
    if (flag != 1) {
      std::string problem;
      problem = "The SparseLinearSystem has been instantiated for a petsc solver.\n";
      problem += "You must call PetscInitialize before calling the petsc solver.\n";
      throw ExceptionRuntime(problem);
    }
  }
#endif
}
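
For orientation, here is a minimal usage sketch (it is not taken from the library's own tests). It assumes a PETSC_D build of CppNoddy, that SparseMatrix provides A(i,j) element access and DenseVector a (size, fill) constructor, and that the PETSc headers are on the include path; as the constructor checks, PetscInitialize must be called first.

#include <petsc.h>                 // assumed: for PetscInitialize/PetscFinalize
#include <SparseMatrix.h>          // assumed header names, mirroring
#include <DenseVector.h>           // the include shown at the top of this page
#include <SparseLinearSystem.h>

int main(int argc, char* argv[]) {
  // the constructor throws unless PetscInitialize has already been called
  PetscInitialize(&argc, &argv, nullptr, nullptr);
  {
    const std::size_t n = 4;
    CppNoddy::SparseMatrix<double> A(n, n);   // the NxN sparse 'A matrix'
    CppNoddy::DenseVector<double> B(n, 1.0);  // the size-N 'B vector'
    // fill a simple tridiagonal, diagonally dominant system
    for (std::size_t i = 0; i < n; ++i) {
      A(i, i) = 4.0;
      if (i > 0)     A(i, i - 1) = -1.0;
      if (i + 1 < n) A(i, i + 1) = -1.0;
    }
    CppNoddy::SparseLinearSystem<double> system(&A, &B, "petsc");
    system.solve();  // factorise + solve; B is overwritten with the solution
  } // the destructor runs here, calling cleanup() before PetscFinalize
  PetscFinalize();
  return 0;
}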

◆ ~SparseLinearSystem()

template<typename _Type >
CppNoddy::SparseLinearSystem< _Type >::~SparseLinearSystem ( )

Destructor for a linear system object.

Definition at line 57 of file SparseLinearSystem.cpp.

{
  cleanup();
}

Member Function Documentation

◆ cleanup()

template<typename _Type >
void CppNoddy::SparseLinearSystem< _Type >::cleanup ( )

Deallocates some objects.

Definition at line 62 of file SparseLinearSystem.cpp.

{
#if defined(PETSC_D) || defined(PETSC_Z)
  // delete objects used in the factorisation
  if (m_factorised) {
    VecDestroy(&m_petsc_x);
    VecDestroy(&m_petsc_B);
    KSPDestroy(&m_petsc_ksp);
    m_factorised = false;
  }
#endif
}

Referenced by main().

◆ factorise() [1/3]

template<typename _Type >
void CppNoddy::SparseLinearSystem< _Type >::factorise ( )

Factorise the Ax=B system.

Referenced by main().

◆ factorise() [2/3]

void CppNoddy::SparseLinearSystem< double >::factorise ( )

Definition at line 93 of file SparseLinearSystem.cpp.

{
#if !defined(PETSC_D)
  std::string problem;
  problem = "CppNoddy is linked against the COMPLEX version of PETSc\n";
  problem += "but you are trying to factorise a DOUBLE matrix. Either\n";
  problem += "redefine your matrix as complex, or recompile with $PETSC_ARCH\n";
  problem += "pointing to a DOUBLE version of the PETSc code.";
  throw ExceptionExternal(problem);
#endif
#if defined(PETSC_D)

  if (m_factorised) {
    // already factorised -- so delete and re-create below
    cleanup();
  }

  // store a boolean to indicate that we have factorised the system
  m_factorised = true;
  PetscInt Istart, Iend, n;
  Mat petsc_A;

  // size of the (assumed square) matrix
  n = m_pA->nrows();
  /*
     Create parallel vectors B (for RHS) & x (for soln).
  */
  VecCreate(PETSC_COMM_WORLD, &m_petsc_B);
  VecSetSizes(m_petsc_B, PETSC_DECIDE, m_pA->nrows());
  // add any command line configuration
  VecSetFromOptions(m_petsc_B);
  // make a copy to define x
  VecDuplicate(m_petsc_B, &m_petsc_x);

  // configure the A matrix
  MatCreate(PETSC_COMM_WORLD, &petsc_A);
  // set A to be an nxn matrix
  MatSetSizes(petsc_A, PETSC_DECIDE, PETSC_DECIDE, n, n);
  // add any command line configuration
  MatSetFromOptions(petsc_A);

  // get: all_rows_nnz[i] is the number of nonzero elts in row i
  PetscInt* all_rows_nnz = new PetscInt[n];
  m_pA->nelts_all_rows(all_rows_nnz);

  // pre-allocate memory using the number of non-zero elts
  // in each row (the 0 is ignored here)
  MatSeqAIJSetPreallocation(petsc_A, 0, all_rows_nnz);

  // add any command line configuration
  MatSetFromOptions(petsc_A);
  // finish the A definition
  MatSetUp(petsc_A);

  /*
     petsc_A is defined as Sequential above, so this is not strictly needed
  */
  MatGetOwnershipRange(petsc_A, &Istart, &Iend);
  // populate the A matrix using the CppNoddy sparse matrix data
  for (PetscInt i = Istart; i < Iend; ++i) {
    // move the matrix data into PETSc format 1 row at a time
    std::size_t nelts_in_row = all_rows_nnz[i];
    // row i has all_rows_nnz[i] elements that are non-zero, so we store their columns
    PetscInt* cols = new PetscInt[all_rows_nnz[i]];
    // store the non-zero elts in this row
    PetscScalar* storage = new PetscScalar[all_rows_nnz[i]];
    // get the data from the CppNoddy sparse matrix structure
    m_pA->get_row_petsc(i, storage, cols);
    MatSetValues(petsc_A, 1, &i, nelts_in_row, cols, storage, INSERT_VALUES);
    // delete temp storage made in the conversion
    delete[] cols;
    delete[] storage;
  }

  // MatSetValues-inserted values are generally cached,
  // so we need to explicitly do final assembly
  MatAssemblyBegin(petsc_A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(petsc_A, MAT_FINAL_ASSEMBLY);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  */
  KSPCreate(PETSC_COMM_WORLD, &m_petsc_ksp);
  KSPSetOperators(m_petsc_ksp, petsc_A, petsc_A);

  /////////////////////////////
  // default solver is MUMPS //
  /////////////////////////////
  //KSPSetType(m_petsc_ksp,KSPPREONLY);
  KSPGetPC(m_petsc_ksp, &m_petsc_pc);
  // hardwire a DIRECT SOLVER via MUMPS
  //PCSetType(m_petsc_pc,PCLU);
  //PCFactorSetMatSolverType(m_petsc_pc,MATSOLVERMUMPS);
  //PCFactorSetUpMatSolverType(m_petsc_pc);
  /////////////////////////////
  KSPSetFromOptions(m_petsc_ksp);
  KSPSetUp(m_petsc_ksp);

  /* create m_petsc_F */
  PCFactorGetMatrix(m_petsc_pc, &m_petsc_F);

  /* increase estimate of working space by 20% */
  // MatMumpsSetIcntl(m_petsc_F,14,20.0);

  /* sequential ordering */
  // icntl = 7;
  // ival = 2;
  // MatMumpsSetIcntl(m_petsc_F,icntl,ival);

  /* threshold for row pivot detection */
  // MatMumpsSetIcntl(m_petsc_F,24,1);
  // icntl = 3;
  // val = 1.e-6;
  // MatMumpsSetCntl(m_petsc_F,icntl,val);

  /* compute determinant of A */
  // MatMumpsSetIcntl(m_petsc_F,33,1);
  /* not used unless we initialise PETSc using the command line options */
  // KSPSetFromOptions(m_petsc_ksp);

  // /* determinant calculation */
  // {
  //   PetscInt icntl, infog34;
  //   PetscReal cntl, rinfo12, rinfo13;
  //   icntl = 3;
  //   MatMumpsGetCntl(F, icntl, &cntl);
  //   // output determinant only on the first proc.
  //   if (!rank) {
  //     MatMumpsGetInfog(F, 34, &infog34);
  //     MatMumpsGetRinfog(F, 12, &rinfo12);
  //     MatMumpsGetRinfog(F, 13, &rinfo13);
  //     PetscPrintf(PETSC_COMM_SELF, " Mumps row pivot threshold = %g\n", cntl);
  //     PetscPrintf(PETSC_COMM_SELF, " Mumps determinant = (%g, %g) * 2^%D \n", (double)rinfo12, (double)rinfo13, infog34);
  //   }
  // }

  MatDestroy(&petsc_A);
  delete[] all_rows_nnz;

#endif // check for PETSC_D/Z
}
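
Because KSPSetFromOptions is called during the factorisation, the solver can also be configured through the PETSc options database instead of uncommenting the hardwired MUMPS lines above. A hedged sketch using the standard PETSc options API (whether MUMPS is actually available depends on how PETSc was built; in recent PETSc versions the first argument of PetscOptionsSetValue is the options object, with NULL meaning the global database):

// select a direct LU solve via MUMPS programmatically, before factorise();
// the equivalent command-line flags are
//   -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps
PetscOptionsSetValue(NULL, "-ksp_type", "preonly");
PetscOptionsSetValue(NULL, "-pc_type", "lu");
PetscOptionsSetValue(NULL, "-pc_factor_mat_solver_type", "mumps");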

◆ factorise() [3/3]

void CppNoddy::SparseLinearSystem< std::complex< double > >::factorise ( )

Definition at line 301 of file SparseLinearSystem.cpp.

{
#if !defined(PETSC_Z)
  std::string problem;
  problem = "CppNoddy is linked against the DOUBLE version of PETSc\n";
  problem += "but you are trying to factorise a COMPLEX matrix.\n";
  problem += "Recompile with $PETSC_ARCH\n";
  problem += "pointing to a COMPLEX version of the PETSc code.";
  throw ExceptionExternal(problem);
#endif

#if defined(PETSC_Z)
  if (m_factorised) {
    // already factorised -- so delete and re-create below
    cleanup();
  }

  // store a boolean to indicate that we have factorised the system
  m_factorised = true;
  PetscInt Istart, Iend, n;
  Mat A;

  // size of the (assumed square) matrix
  n = m_pA->nrows();
  /*
     Create parallel vectors.
  */
  VecCreate(PETSC_COMM_WORLD, &m_petsc_B);
  VecSetSizes(m_petsc_B, PETSC_DECIDE, m_pA->nrows());
  VecSetFromOptions(m_petsc_B);
  VecDuplicate(m_petsc_B, &m_petsc_x);

  // configure the A matrix
  MatCreate(PETSC_COMM_WORLD, &A);
  // set A to be an nxn matrix
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n);
  MatSetFromOptions(A);

  // get: all_rows_nnz[i] is the number of nonzero elts in row i
  PetscInt* all_rows_nnz = new PetscInt[n];
  m_pA->nelts_all_rows(all_rows_nnz);

  // pre-allocate memory using the number of non-zero elts
  // in each row (the 0 is ignored here)
  MatSeqAIJSetPreallocation(A, 0, all_rows_nnz);
  MatSetUp(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors. Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A, &Istart, &Iend);
  // populate the A matrix using the CppNoddy sparse matrix data
  for (PetscInt i = Istart; i < Iend; ++i) {
    // move the matrix data into PETSc format 1 row at a time
    std::size_t nelts_in_row = all_rows_nnz[i];
    // row i has all_rows_nnz[i] elements that are non-zero, so we store their columns
    PetscInt* cols = new PetscInt[nelts_in_row];
    // store the non-zero elts in this row
    PetscScalar* storage = new PetscScalar[nelts_in_row];
    // get the data from the CppNoddy sparse matrix structure
    m_pA->get_row_petsc(i, storage, cols);
    MatSetValues(A, 1, &i, nelts_in_row, cols, storage, INSERT_VALUES);
    // delete temp storage made in the conversion
    delete[] cols;
    delete[] storage;
  }

  /*
     Assemble matrix, using the 2-step process:
     MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  */
  KSPCreate(PETSC_COMM_WORLD, &m_petsc_ksp);
  KSPSetOperators(m_petsc_ksp, A, A);

  //KSPSetType(m_petsc_ksp,KSPPREONLY);
  KSPGetPC(m_petsc_ksp, &m_petsc_pc);
  // hardwire a DIRECT SOLVER via MUMPS
  //PCSetType(m_petsc_pc,PCLU);
  //PCFactorSetMatSolverType(m_petsc_pc,MATSOLVERSUPERLU_DIST);
  //PCFactorSetMatSolverType(m_petsc_pc,MATSOLVERMUMPS);
  //PCFactorSetMatSolverType(m_petsc_pc,MATSOLVERMKL_PARDISO);
  //PCFactorSetUpMatSolverType(m_petsc_pc);

  KSPSetFromOptions(m_petsc_ksp);

  KSPSetUp(m_petsc_ksp);
  /* call MatGetFactor() to create F */
  PCFactorGetMatrix(m_petsc_pc, &m_petsc_F);

  /* sequential ordering */
  //PetscInt ival,icntl;
  //icntl = 7;
  //ival = 2;
  //MatMumpsSetIcntl(m_petsc_F,icntl,ival);

  /* threshold for row pivot detection */
  //MatMumpsSetIcntl(m_petsc_F,24,1);
  //icntl = 3;
  //PetscReal val;
  //val = 1.e-6;
  //MatMumpsSetCntl(m_petsc_F,icntl,val);

  /* compute determinant of A */
  // MatMumpsSetIcntl(m_petsc_F,33,1);
  /* not used unless we initialise PETSc using the command line options */
  // KSPSetFromOptions(m_petsc_ksp);

  /* Get info from matrix factors */

  //KSPView(m_petsc_ksp, PETSC_VIEWER_STDOUT_WORLD);

  MatDestroy(&A);
  delete[] all_rows_nnz;

#endif // check for PETSC_D/Z
}
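
The complex specialisation is used in exactly the same way; a minimal sketch, assuming a PETSC_Z build and the same element-access API assumed in the double example above:

#include <complex>

const std::size_t n = 4;
CppNoddy::SparseMatrix<std::complex<double> > A(n, n);
CppNoddy::DenseVector<std::complex<double> > B(n, std::complex<double>(1.0, 0.0));
// ... fill A, possibly with complex entries, then solve as before ...
CppNoddy::SparseLinearSystem<std::complex<double> > system(&A, &B, "petsc");
system.solve();  // B now holds the complex solution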

◆ solve()

template<typename _Type >
void CppNoddy::SparseLinearSystem< _Type >::solve ( )

Solve the sparse system.

Definition at line 75 of file SparseLinearSystem.cpp.

{
  if ("petsc" == m_version) {
    factorise();
    solve_using_factorisation();
  } else { // we catch incorrect m_version choices in the ctor
    std::string problem;
    problem = "CppNoddy needs to be linked to PETSc to solve sparse\n";
    problem += "linear systems.";
    throw ExceptionExternal(problem);
  }
}

Referenced by main().

◆ solve_using_factorisation() [1/3]

template<typename _Type >
void CppNoddy::SparseLinearSystem< _Type >::solve_using_factorisation ( )

Resolve the same system using the same factorisation.

Referenced by main().
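
When the matrix A is fixed but the right-hand side changes, factorise() need only be called once. A minimal sketch of the intended pattern, continuing the earlier usage example (make_rhs is a hypothetical helper; note from the code below that the solution overwrites the B vector in place):

const int num_rhs = 3;             // number of right-hand sides to solve for
system.factorise();                // LU-factorise A once
for (int k = 0; k < num_rhs; ++k) {
  for (std::size_t i = 0; i < n; ++i) {
    B[i] = make_rhs(k, i);         // make_rhs is a hypothetical helper
  }
  system.solve_using_factorisation();  // B is overwritten with the solution
}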

◆ solve_using_factorisation() [2/3]

void CppNoddy::SparseLinearSystem< double >::solve_using_factorisation ( )

Definition at line 242 of file SparseLinearSystem.cpp.

{
#if !defined(PETSC_D)
  std::string problem;
  problem = "CppNoddy is linked against the COMPLEX version of PETSc\n";
  problem += "but you are trying to solve a DOUBLE matrix. Either\n";
  problem += "redefine your matrix as complex, or recompile with $PETSC_ARCH\n";
  problem += "pointing to a DOUBLE version of the PETSc code.";
  throw ExceptionExternal(problem);
#endif
#if defined(PETSC_D)
  // size of the (assumed square) matrix
  PetscInt n = m_pA->nrows();

  // populate the RHS vector using the CppNoddy DenseVector content
  for (PetscInt i = 0; i < n; ++i) {
    VecSetValue(m_petsc_B, i, m_pB->operator[](i), INSERT_VALUES);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  KSPSolve(m_petsc_ksp, m_petsc_B, m_petsc_x);

  /* We can now gather the parallel result back to ALL processes.
     This is temporary, as the SparseMatrix is stored on each process
     and is too dumb for "proper" parallelization */
  Vec y;
  // a scatter context
  VecScatter ctx = 0;
  // map all elts of the parallel vector to a sequential copy
  VecScatterCreateToAll(m_petsc_x, &ctx, &y);
  // scatter it
  VecScatterBegin(ctx, m_petsc_x, y, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(ctx, m_petsc_x, y, INSERT_VALUES, SCATTER_FORWARD);
  // clean up
  VecScatterDestroy(&ctx);
  // this array is a pointer not a copy
  PetscScalar* array;
  VecGetArray(y, &array);
  // now copy to the CppNoddy DenseVector
  for (PetscInt i = 0; i < n; i++) {
    m_pB->operator[](i) = array[i];
  }
  // follow the docs and Restore after Get; note this acts on the
  // gathered vector y that the array was taken from
  VecRestoreArray(y, &array);
  VecDestroy(&y);
#endif
}

◆ solve_using_factorisation() [3/3]

void CppNoddy::SparseLinearSystem< std::complex< double > >::solve_using_factorisation ( )

Definition at line 432 of file SparseLinearSystem.cpp.

{
#if !defined(PETSC_Z)
  std::string problem;
  problem = "CppNoddy is linked against the DOUBLE version of PETSc\n";
  problem += "but you are trying to solve a COMPLEX matrix.\n";
  problem += "Recompile with $PETSC_ARCH\n";
  problem += "pointing to a COMPLEX version of the PETSc code.";
  throw ExceptionExternal(problem);
#endif
#if defined(PETSC_Z)
  // size of the (assumed square) matrix
  PetscInt n = m_pA->nrows();

  // populate the RHS vector using the CppNoddy DenseVector content
  for (PetscInt i = 0; i < n; ++i) {
    VecSetValue(m_petsc_B, i, m_pB->operator[](i), INSERT_VALUES);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  KSPSolve(m_petsc_ksp, m_petsc_B, m_petsc_x);

  /* We can now gather the parallel result back to ALL processes.
     This is temporary, as the SparseMatrix is stored on each process
     and is too dumb for "proper" parallelization */
  Vec y;
  // a scatter context
  VecScatter ctx = 0;
  // map all elts of the parallel vector to a sequential copy
  VecScatterCreateToAll(m_petsc_x, &ctx, &y);
  // scatter it
  VecScatterBegin(ctx, m_petsc_x, y, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(ctx, m_petsc_x, y, INSERT_VALUES, SCATTER_FORWARD);
  // clean up
  VecScatterDestroy(&ctx);
  // this array is a pointer not a copy
  PetscScalar* array;
  VecGetArray(y, &array);
  // now copy to the CppNoddy DenseVector
  for (PetscInt i = 0; i < n; i++) {
    m_pB->operator[](i) = array[i];
  }
  // follow the docs and Restore after Get; note this acts on the
  // gathered vector y that the array was taken from
  VecRestoreArray(y, &array);
  VecDestroy(&y);
#endif
}

◆ temp_solve()

template<typename _Type >
void CppNoddy::SparseLinearSystem< _Type >::temp_solve ( )

The documentation for this class was generated from the following files:

SparseLinearSystem.h
SparseLinearSystem.cpp

© 2012

R.E. Hewitt