5#if defined(PETSC_D) || defined(PETSC_Z)
7#ifndef DISTRIBUTEDMATRIX_H
8#define DISTRIBUTEDMATRIX_H
/// A distributed (MPI) sparse matrix class that wraps PETSc's parallel
/// AIJ matrix type (MATMPIAIJ). Only a fragment of the class is visible
/// in this chunk; the template parameter _Type is used for the element
/// type of the DenseVector arguments in set_row().
20 template <
typename _Type>
21 class DistributedMatrix {
/// Construct a global rows x cols distributed matrix with preallocated
/// nonzero storage. PETSc decides the local row/column split (PETSC_DECIDE).
/// \param rows Global number of rows.
/// \param cols Global number of columns.
/// \param nd Nonzeros per row to preallocate in the (local) diagonal block.
/// \param od Nonzeros per row to preallocate in the off-diagonal block.
/// Throws ExceptionRuntime if MPI has not been initialised (i.e. no
/// PetscSession was created first), or if PETSc support is not compiled in.
30 DistributedMatrix(
const PetscInt& rows,
const PetscInt& cols,
31 const PetscInt& nd,
const PetscInt& od ) {
36#if defined(PETSC_D) || defined(PETSC_Z)
// check that MPI has already been started by a PetscSession object
38 MPI_Initialized(&flag);
// MPI is not running: the user forgot to construct a PetscSession
41 problem =
"DistributedMatrix<> needs PETSc and therefore you must call \n";
42 problem +=
"PetscSession before instantiating the object.\n";
43 throw ExceptionRuntime(problem);
// no PETSc support was compiled into this build at all
47 problem =
"DistributedMatrix<> needs PETSc.\n";
48 throw ExceptionRuntime(problem);
// create a parallel sparse (AIJ) matrix of the requested global size
52 MatCreate(PETSC_COMM_WORLD,&m_A);
53 MatSetType(m_A,MATMPIAIJ);
55 MatSetSizes(m_A,PETSC_DECIDE,PETSC_DECIDE,rows,cols);
// preallocate: nd nonzeros/row in the diagonal block, od in the off-diagonal block
57 MatMPIAIJSetPreallocation(m_A, nd, NULL, od, NULL);
// NOTE(review): PETSc documentation recommends calling MatSetFromOptions()
// *before* the preallocation routine so command-line options can affect the
// matrix type -- confirm the intended ordering here.
59 MatSetFromOptions(m_A);
// cache this process's rank and the communicator size for later reporting
63 MPI_Comm_size(MPI_COMM_WORLD,&m_size);
64 MPI_Comm_rank(MPI_COMM_WORLD,&m_rank);
67 PetscPrintf(PETSC_COMM_WORLD,
"[DEBUG] Creating a distributed matrix\n");
68 PetscInt row_start, row_end;
69 MatGetOwnershipRange(m_A,&row_start,&row_end);
// NOTE(review): %D is PETSc's format for PetscInt, but m_rank is a
// PetscMPIInt (plain int); if PETSc is built with 64-bit indices this is a
// varargs type mismatch -- verify, and consider %d for m_rank.
70 PetscSynchronizedPrintf(PETSC_COMM_WORLD,
"[DEBUG] Rank = %D Start_row = %D End_row = %D \n", m_rank, row_start, row_end-1 );
71 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
/// Assignment-style element access: stores `value` at global position
/// (row, col). Simply forwards to set_elt(), so the entry is INSERTed
/// (overwritten), not accumulated.
/// \param row Global row index of the element.
/// \param col Global column index of the element.
/// \param value The value to store at (row, col).
93 void operator()(
const PetscInt& row,
const PetscInt& col,
const PetscScalar& value) {
94 set_elt( row, col, value );
/// Set a single matrix element, but only if the target row is owned by
/// this process -- calls on non-owning ranks are silently ignored.
/// Uses INSERT_VALUES, so an existing entry is overwritten, not summed.
/// \param row Global row index of the element.
/// \param col Global column index of the element.
/// \param value The value to store at (row, col).
104 void set_elt(
const PetscInt& row,
const PetscInt& col,
const PetscScalar& value) {
105 PetscInt row_start, row_end;
106 MatGetOwnershipRange(m_A,&row_start,&row_end);
// only act if this rank owns [row_start, row_end)
107 if ( ( row >= row_start ) && ( row < row_end ) ) {
109 MatSetValues(m_A,1,&row,1,&col,&value,INSERT_VALUES);
/// Set all the (nonzero) entries of one matrix row in a single call,
/// but only if the row is owned by this process -- calls on non-owning
/// ranks are silently ignored. Uses INSERT_VALUES (overwrite semantics).
/// \param row Global row index to fill.
/// \param cols The global column indices of the entries being set.
/// \param values The corresponding values; must parallel `cols`.
/// NOTE(review): assumes cols.size() == values.size() -- not checked here.
117 void set_row(
const PetscInt& row,
const DenseVector<int>& cols,
const DenseVector<_Type>& values) {
118 PetscInt row_start, row_end;
119 MatGetOwnershipRange(m_A,&row_start,&row_end);
// only act if this rank owns [row_start, row_end)
120 if ( ( row >= row_start ) && ( row < row_end ) ) {
121 PetscInt nnz_cols = cols.size();
// pass the contiguous index/value storage straight to PETSc
123 MatSetValues(m_A,1,&row,nnz_cols,&cols[0],&values[0],INSERT_VALUES);
/// Finalise the parallel assembly of the matrix. This is a collective
/// operation: every rank must call it after all set_elt()/set_row()
/// calls and before the matrix is used in any computation.
130 void final_assembly() {
131 MatAssemblyBegin(m_A,MAT_FINAL_ASSEMBLY);
132 MatAssemblyEnd(m_A,MAT_FINAL_ASSEMBLY);
// (Body of a debug/view method whose signature was lost in extraction --
// presumably a view()/dump-style member; confirm against the full file.)
// Reports each rank's ownership range, then dumps the whole matrix.
137 PetscInt row_start, row_end;
138 MatGetOwnershipRange(m_A,&row_start,&row_end);
139 PetscPrintf(PETSC_COMM_WORLD,
"Matrix view to stdout:\n");
// NOTE(review): %D expects PetscInt but m_rank is PetscMPIInt (int);
// mismatched under 64-bit PetscInt builds -- verify format specifiers.
140 PetscSynchronizedPrintf(PETSC_COMM_WORLD,
"Processor rank = %D, start row = %D end row = %D \n", m_rank, row_start, row_end-1 );
141 PetscSynchronizedPrintf(PETSC_COMM_WORLD,
" Diagonal entry is a %Dx%D square.\n",row_end-row_start,row_end-row_start);
142 PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
// collective: print the assembled matrix on the world viewer
143 MatView(m_A,PETSC_VIEWER_STDOUT_WORLD);
// this process's MPI rank and the communicator size (set in the constructor)
153 PetscMPIInt m_rank, m_size;
Specification for a templated DenseVector class – a dense, dynamically-sized vector object.
The collection of CppNoddy exceptions.
A templated SparseVector class – a sparse, variable-size vector object.
A collection of OO numerical routines aimed at simple (typical) applied problems in continuum mechanics.