20 #include "libmesh/libmesh_config.h"
22 #ifdef LIBMESH_HAVE_PETSC
25 #include "libmesh/petsc_matrix.h"
26 #include "libmesh/dof_map.h"
27 #include "libmesh/dense_matrix.h"
28 #include "libmesh/petsc_vector.h"
29 #include "libmesh/parallel.h"
30 #include "libmesh/utility.h"
31 #include "libmesh/wrapped_petsc.h"
34 #ifdef LIBMESH_HAVE_UNISTD_H
39 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
43 using namespace libMesh;
49 void transform_preallocation_arrays (
const PetscInt blocksize,
50 const std::vector<numeric_index_type> & n_nz,
51 const std::vector<numeric_index_type> & n_oz,
52 std::vector<numeric_index_type> & b_n_nz,
53 std::vector<numeric_index_type> & b_n_oz)
55 libmesh_assert_equal_to (n_nz.size(), n_oz.size());
56 libmesh_assert_equal_to (n_nz.size()%blocksize, 0);
58 b_n_nz.clear(); b_n_nz.reserve(n_nz.size()/blocksize);
59 b_n_oz.clear(); b_n_oz.reserve(n_oz.size()/blocksize);
61 for (std::size_t nn=0, nnzs=n_nz.size(); nn<nnzs; nn += blocksize)
63 b_n_nz.push_back (n_nz[nn]/blocksize);
64 b_n_oz.push_back (n_oz[nn]/blocksize);
86 _destroy_mat_on_exit(true),
96 const Parallel::Communicator & comm_in) :
98 _destroy_mat_on_exit(false),
108 template <
typename T>
114 template <
typename T>
117 _mat_type = mat_type;
120 template <
typename T>
139 PetscErrorCode ierr = 0;
140 PetscInt m_global =
static_cast<PetscInt
>(m_in);
141 PetscInt n_global =
static_cast<PetscInt
>(n_in);
142 PetscInt m_local =
static_cast<PetscInt
>(m_l);
143 PetscInt n_local =
static_cast<PetscInt
>(n_l);
144 PetscInt n_nz =
static_cast<PetscInt
>(nnz);
145 PetscInt n_oz =
static_cast<PetscInt
>(noz);
147 ierr = MatCreate(this->comm().
get(), &_mat);
148 LIBMESH_CHKERR(ierr);
149 ierr = MatSetSizes(_mat, m_local, n_local, m_global, n_global);
150 LIBMESH_CHKERR(ierr);
151 PetscInt blocksize =
static_cast<PetscInt
>(blocksize_in);
152 ierr = MatSetBlockSize(_mat,blocksize);
153 LIBMESH_CHKERR(ierr);
155 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
160 libmesh_assert_equal_to (m_local % blocksize, 0);
161 libmesh_assert_equal_to (n_local % blocksize, 0);
162 libmesh_assert_equal_to (m_global % blocksize, 0);
163 libmesh_assert_equal_to (n_global % blocksize, 0);
164 libmesh_assert_equal_to (n_nz % blocksize, 0);
165 libmesh_assert_equal_to (n_oz % blocksize, 0);
167 ierr = MatSetType(_mat, MATBAIJ);
168 LIBMESH_CHKERR(ierr);
173 ierr = MatSetOptionsPrefix(_mat,
"");
174 LIBMESH_CHKERR(ierr);
175 ierr = MatSetFromOptions(_mat);
176 LIBMESH_CHKERR(ierr);
177 ierr = MatSeqBAIJSetPreallocation(_mat, blocksize, n_nz/blocksize, NULL);
178 LIBMESH_CHKERR(ierr);
179 ierr = MatMPIBAIJSetPreallocation(_mat, blocksize,
180 n_nz/blocksize, NULL,
181 n_oz/blocksize, NULL);
182 LIBMESH_CHKERR(ierr);
189 ierr = MatSetType(_mat, MATAIJ);
190 LIBMESH_CHKERR(ierr);
195 ierr = MatSetOptionsPrefix(_mat,
"");
196 LIBMESH_CHKERR(ierr);
197 ierr = MatSetFromOptions(_mat);
198 LIBMESH_CHKERR(ierr);
199 ierr = MatSeqAIJSetPreallocation(_mat, n_nz, NULL);
200 LIBMESH_CHKERR(ierr);
201 ierr = MatMPIAIJSetPreallocation(_mat, n_nz, NULL, n_oz, NULL);
202 LIBMESH_CHKERR(ierr);
206 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
207 ierr = MatSetType(_mat, MATHYPRE);
212 LIBMESH_CHKERR(ierr);
213 ierr = MatSetOptionsPrefix(_mat,
"");
214 LIBMESH_CHKERR(ierr);
215 ierr = MatSetFromOptions(_mat);
216 LIBMESH_CHKERR(ierr);
217 ierr = MatHYPRESetPreallocation(_mat, n_nz, NULL, n_oz, NULL);
218 LIBMESH_CHKERR(ierr);
220 libmesh_error_msg(
"PETSc 3.9.4 or higher with hypre is required for MatHypre");
224 default: libmesh_error_msg(
"Unsupported petsc matrix type");
229 ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
230 LIBMESH_CHKERR(ierr);
236 template <
typename T>
241 const std::vector<numeric_index_type> & n_nz,
242 const std::vector<numeric_index_type> & n_oz,
248 PetscInt blocksize =
static_cast<PetscInt
>(blocksize_in);
257 libmesh_assert_equal_to (n_nz.size(), m_l);
258 libmesh_assert_equal_to (n_oz.size(), m_l);
260 PetscErrorCode ierr = 0;
261 PetscInt m_global =
static_cast<PetscInt
>(m_in);
262 PetscInt n_global =
static_cast<PetscInt
>(n_in);
263 PetscInt m_local =
static_cast<PetscInt
>(m_l);
264 PetscInt n_local =
static_cast<PetscInt
>(n_l);
266 ierr = MatCreate(this->comm().
get(), &_mat);
267 LIBMESH_CHKERR(ierr);
268 ierr = MatSetSizes(_mat, m_local, n_local, m_global, n_global);
269 LIBMESH_CHKERR(ierr);
270 ierr = MatSetBlockSize(_mat,blocksize);
271 LIBMESH_CHKERR(ierr);
273 #ifdef LIBMESH_ENABLE_BLOCKED_STORAGE
278 libmesh_assert_equal_to (m_local % blocksize, 0);
279 libmesh_assert_equal_to (n_local % blocksize, 0);
280 libmesh_assert_equal_to (m_global % blocksize, 0);
281 libmesh_assert_equal_to (n_global % blocksize, 0);
283 ierr = MatSetType(_mat, MATBAIJ);
284 LIBMESH_CHKERR(ierr);
289 LIBMESH_CHKERR(ierr);
290 ierr = MatSetOptionsPrefix(_mat,
"");
291 LIBMESH_CHKERR(ierr);
292 ierr = MatSetFromOptions(_mat);
293 LIBMESH_CHKERR(ierr);
295 std::vector<numeric_index_type> b_n_nz, b_n_oz;
297 transform_preallocation_arrays (blocksize,
301 ierr = MatSeqBAIJSetPreallocation (_mat,
305 LIBMESH_CHKERR(ierr);
307 ierr = MatMPIBAIJSetPreallocation (_mat,
313 LIBMESH_CHKERR(ierr);
320 ierr = MatSetType(_mat, MATAIJ);
321 LIBMESH_CHKERR(ierr);
326 LIBMESH_CHKERR(ierr);
327 ierr = MatSetOptionsPrefix(_mat,
"");
328 LIBMESH_CHKERR(ierr);
329 ierr = MatSetFromOptions(_mat);
330 LIBMESH_CHKERR(ierr);
331 ierr = MatSeqAIJSetPreallocation (_mat,
334 LIBMESH_CHKERR(ierr);
335 ierr = MatMPIAIJSetPreallocation (_mat,
343 #if !PETSC_VERSION_LESS_THAN(3,9,4) && LIBMESH_HAVE_PETSC_HYPRE
344 ierr = MatSetType(_mat, MATHYPRE);
345 LIBMESH_CHKERR(ierr);
350 LIBMESH_CHKERR(ierr);
351 ierr = MatSetOptionsPrefix(_mat,
"");
352 LIBMESH_CHKERR(ierr);
353 ierr = MatSetFromOptions(_mat);
354 LIBMESH_CHKERR(ierr);
355 ierr = MatHYPRESetPreallocation (_mat,
360 LIBMESH_CHKERR(ierr);
362 libmesh_error_msg(
"PETSc 3.9.4 or higher with hypre is required for MatHypre");
366 default: libmesh_error_msg(
"Unsupported petsc matrix type");
372 ierr = MatSetOption(_mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
373 LIBMESH_CHKERR(ierr);
378 template <
typename T>
381 libmesh_assert(this->_dof_map);
384 const numeric_index_type m_l = this->_dof_map->n_dofs_on_processor(this->processor_id());
386 const std::vector<numeric_index_type> & n_nz = this->_sp->get_n_nz();
387 const std::vector<numeric_index_type> & n_oz = this->_sp->get_n_oz();
389 PetscInt blocksize =
static_cast<PetscInt
>(this->_dof_map->block_size());
391 this->init(m_in, m_in, m_l, m_l, n_nz, n_oz, blocksize);
395 template <
typename T>
398 libmesh_not_implemented();
401 template <
typename T>
// PetscMatrix<T>::reset_preallocation() (body fragment): restores the matrix
// to its originally preallocated nonzero pattern via MatResetPreallocation(),
// which is only available in PETSc >= 3.9.0.
//
// NOTE(review): the extraction dropped the `#else`/`#endif` lines of this
// preprocessor branch — the libmesh_warning below is the pre-3.9 fallback
// path, not code that runs together with the call above; confirm against the
// full source.
404 #if !PETSC_VERSION_LESS_THAN(3,9,0)
407 auto ierr = MatResetPreallocation(_mat);
408 LIBMESH_CHKERR(ierr);
// Fallback for older PETsc: no reset is possible, so the most recently set
// sparsity pattern remains in effect (possibly slower, but still correct).
410 libmesh_warning(
"Your version of PETSc doesn't support resetting of "
411 "preallocation, so we will use your most recent sparsity "
412 "pattern. This may result in a degradation of performance\n");
416 template <
typename T>
423 PetscErrorCode ierr=0;
427 ierr = MatGetLocalSize(_mat,&m_l,&n_l);
428 LIBMESH_CHKERR(ierr);
432 ierr = MatZeroEntries(_mat);
433 LIBMESH_CHKERR(ierr);
437 template <
typename T>
444 PetscErrorCode ierr=0;
451 ierr = MatZeroRows(_mat, cast_int<PetscInt>(rows.size()),
455 ierr = MatZeroRows(_mat, 0, NULL,
PS(diag_value), NULL, NULL);
457 LIBMESH_CHKERR(ierr);
460 template <
typename T>
463 libmesh_error_msg_if(!this->
closed(),
"Matrix must be closed before it can be cloned!");
467 PetscErrorCode ierr = MatDuplicate(_mat, MAT_DO_NOT_COPY_VALUES, ©);
468 LIBMESH_CHKERR(ierr);
472 auto ret = std::make_unique<PetscMatrix<T>>(copy, this->comm());
473 ret->set_destroy_mat_on_exit(
true);
477 return std::unique_ptr<SparseMatrix<T>>(ret.release());
482 template <
typename T>
485 libmesh_error_msg_if(!this->
closed(),
"Matrix must be closed before it can be cloned!");
489 PetscErrorCode ierr = MatDuplicate(_mat, MAT_COPY_VALUES, ©);
490 LIBMESH_CHKERR(ierr);
494 auto ret = std::make_unique<PetscMatrix<T>>(copy, this->comm());
495 ret->set_destroy_mat_on_exit(
true);
499 return std::unique_ptr<SparseMatrix<T>>(ret.release());
502 template <
typename T>
505 if ((this->
initialized()) && (this->_destroy_mat_on_exit))
507 exceptionless_semiparallel_only();
511 PetscErrorCode ierr = MatDestroy (&_mat);
513 libmesh_warning(
"Warning: MatDestroy returned a non-zero error code which we ignored.");
521 template <
typename T>
528 PetscErrorCode ierr=0;
529 PetscReal petsc_value;
532 libmesh_assert (this->
closed());
534 ierr = MatNorm(_mat, NORM_1, &petsc_value);
535 LIBMESH_CHKERR(ierr);
537 value =
static_cast<Real>(petsc_value);
544 template <
typename T>
551 PetscErrorCode ierr=0;
552 PetscReal petsc_value;
555 libmesh_assert (this->
closed());
557 ierr = MatNorm(_mat, NORM_INFINITY, &petsc_value);
558 LIBMESH_CHKERR(ierr);
560 value =
static_cast<Real>(petsc_value);
567 template <
typename T>
576 libmesh_deprecated();
577 libmesh_warning(
"The matrix must be assembled before calling PetscMatrix::print_matlab().\n"
578 "Please update your code, as this warning will become an error in a future release.");
582 PetscErrorCode ierr = 0;
585 ierr = PetscViewerCreate (this->comm().
get(),
587 LIBMESH_CHKERR(ierr);
593 ierr = PetscViewerASCIIOpen( this->comm().
get(),
596 LIBMESH_CHKERR(ierr);
598 #if PETSC_VERSION_LESS_THAN(3,7,0)
599 ierr = PetscViewerSetFormat (petsc_viewer,
600 PETSC_VIEWER_ASCII_MATLAB);
602 ierr = PetscViewerPushFormat (petsc_viewer,
603 PETSC_VIEWER_ASCII_MATLAB);
606 LIBMESH_CHKERR(ierr);
608 ierr = MatView (_mat, petsc_viewer);
609 LIBMESH_CHKERR(ierr);
615 #if PETSC_VERSION_LESS_THAN(3,7,0)
616 ierr = PetscViewerSetFormat (PETSC_VIEWER_STDOUT_WORLD,
617 PETSC_VIEWER_ASCII_MATLAB);
619 ierr = PetscViewerPushFormat (PETSC_VIEWER_STDOUT_WORLD,
620 PETSC_VIEWER_ASCII_MATLAB);
623 LIBMESH_CHKERR(ierr);
625 ierr = MatView (_mat, PETSC_VIEWER_STDOUT_WORLD);
626 LIBMESH_CHKERR(ierr);
634 template <
typename T>
651 libmesh_deprecated();
652 libmesh_warning(
"The matrix must be assembled before calling PetscMatrix::print_personal().\n"
653 "Please update your code, as this warning will become an error in a future release.");
657 PetscErrorCode ierr=0;
660 if (os.rdbuf() == std::cout.rdbuf())
662 ierr = MatView(_mat, NULL);
663 LIBMESH_CHKERR(ierr);
671 std::string temp_filename;
675 char c[] =
"temp_petsc_matrix.XXXXXX";
680 if (this->processor_id() == 0)
685 libmesh_error_msg_if(fd == -1,
"mkstemp failed in PetscMatrix::print_personal()");
697 this->comm().broadcast(temp_filename);
700 PetscViewer petsc_viewer;
706 ierr = PetscViewerASCIIOpen( this->comm().
get(),
707 temp_filename.c_str(),
709 LIBMESH_CHKERR(ierr);
717 ierr = MatView (_mat, petsc_viewer);
718 LIBMESH_CHKERR(ierr);
720 if (this->processor_id() == 0)
724 std::ifstream input_stream(temp_filename.c_str());
725 os << input_stream.rdbuf();
729 input_stream.close();
730 std::remove(temp_filename.c_str());
740 template <
typename T>
742 const std::vector<numeric_index_type> & rows,
743 const std::vector<numeric_index_type> & cols)
750 libmesh_assert_equal_to (rows.size(), n_rows);
751 libmesh_assert_equal_to (cols.size(), n_cols);
753 PetscErrorCode ierr=0;
754 ierr = MatSetValues(_mat,
759 LIBMESH_CHKERR(ierr);
767 template <
typename T>
769 const std::vector<numeric_index_type> & brows,
770 const std::vector<numeric_index_type> & bcols)
775 cast_int<numeric_index_type>(brows.size());
777 cast_int<numeric_index_type>(bcols.size());
779 PetscErrorCode ierr=0;
783 cast_int<numeric_index_type>(dm.
m());
785 cast_int<numeric_index_type>(dm.
n());
788 libmesh_assert_equal_to (n_cols / n_bcols, blocksize);
789 libmesh_assert_equal_to (blocksize*n_brows, n_rows);
790 libmesh_assert_equal_to (blocksize*n_bcols, n_cols);
792 PetscInt petsc_blocksize;
793 ierr = MatGetBlockSize(_mat, &petsc_blocksize);
794 LIBMESH_CHKERR(ierr);
795 libmesh_assert_equal_to (blocksize, static_cast<numeric_index_type>(petsc_blocksize));
799 ierr = MatSetValuesBlocked(_mat,
804 LIBMESH_CHKERR(ierr);
811 template <
typename T>
813 const std::vector<numeric_index_type> & rows,
814 const std::vector<numeric_index_type> & cols,
815 const bool reuse_submatrix)
const
819 libmesh_deprecated();
820 libmesh_warning(
"The matrix must be assembled before calling PetscMatrix::create_submatrix().\n"
821 "Please update your code, as this warning will become an error in a future release.");
826 PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
834 PetscErrorCode ierr=0;
837 ierr = ISCreateGeneral(this->comm().
get(),
838 cast_int<PetscInt>(rows.size()),
841 isrow.
get()); LIBMESH_CHKERR(ierr);
844 ierr = ISCreateGeneral(this->comm().
get(),
845 cast_int<PetscInt>(cols.size()),
848 iscol.
get()); LIBMESH_CHKERR(ierr);
851 ierr = LibMeshCreateSubMatrix(_mat,
854 (reuse_submatrix ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX),
855 &(petsc_submatrix->
_mat)); LIBMESH_CHKERR(ierr);
859 petsc_submatrix->
close();
862 template <
typename T>
864 const std::vector<numeric_index_type> & rows,
865 const std::vector<numeric_index_type> & cols)
const
869 libmesh_deprecated();
870 libmesh_warning(
"The matrix must be assembled before calling PetscMatrix::create_submatrix_nosort().\n"
871 "Please update your code, as this warning will become an error in a future release.");
876 PetscMatrix<T> * petsc_submatrix = cast_ptr<PetscMatrix<T> *>(&submatrix);
878 PetscErrorCode ierr=0;
880 ierr=MatZeroEntries(petsc_submatrix->
_mat);
881 LIBMESH_CHKERR(ierr);
883 PetscInt pc_ncols = 0;
884 const PetscInt * pc_cols;
885 const PetscScalar * pc_vals;
888 std::vector<PetscInt> sub_cols;
889 std::vector<PetscScalar> sub_vals;
891 for (
auto i : index_range(rows))
893 PetscInt sub_rid[] = {
static_cast<PetscInt
>(i)};
894 PetscInt rid =
static_cast<PetscInt
>(rows[i]);
896 if (rows[i]>= this->row_start() && rows[i]< this->row_stop())
899 ierr = MatGetRow(_mat, rid, &pc_ncols, &pc_cols, &pc_vals);
900 LIBMESH_CHKERR(ierr);
902 for (
auto j : index_range(cols))
904 for (
unsigned int idx = 0; idx< static_cast<unsigned int>(pc_ncols); idx++)
906 if (pc_cols[idx] == static_cast<PetscInt>(cols[j]))
908 sub_cols.push_back(static_cast<PetscInt>(j));
909 sub_vals.push_back(pc_vals[idx]);
914 ierr = MatSetValues(petsc_submatrix->
_mat,
917 static_cast<PetscInt>(sub_vals.size()),
921 LIBMESH_CHKERR(ierr);
922 ierr = MatRestoreRow(_mat, rid, &pc_ncols, &pc_cols, &pc_vals);
923 LIBMESH_CHKERR(ierr);
929 MatAssemblyBeginEnd(petsc_submatrix->comm(), petsc_submatrix->
_mat, MAT_FINAL_ASSEMBLY);
932 petsc_submatrix->
close();
936 template <
typename T>
943 PetscErrorCode ierr =
944 MatGetDiagonal(
const_cast<PetscMatrix<T> *
>(
this)->mat(),petsc_dest.
vec()); LIBMESH_CHKERR(ierr);
949 template <
typename T>
957 if (&petsc_dest !=
this)
961 if (&petsc_dest ==
this)
964 #if PETSC_VERSION_LESS_THAN(3,7,0)
965 ierr = MatTranspose(_mat,MAT_REUSE_MATRIX,&petsc_dest.
_mat);
967 ierr = MatTranspose(_mat, MAT_INPLACE_MATRIX, &petsc_dest.
_mat);
970 ierr = MatTranspose(_mat,MAT_INITIAL_MATRIX,&petsc_dest.
_mat);
971 LIBMESH_CHKERR(ierr);
982 template <
typename T>
994 MatAssemblyBeginEnd(this->comm(), _mat, MAT_FINAL_ASSEMBLY);
997 template <
typename T>
1000 semiparallel_only();
1002 MatAssemblyBeginEnd(this->comm(), _mat, MAT_FLUSH_ASSEMBLY);
1007 template <
typename T>
1012 PetscInt petsc_m=0, petsc_n=0;
1013 PetscErrorCode ierr=0;
1015 ierr = MatGetSize (_mat, &petsc_m, &petsc_n);
1016 LIBMESH_CHKERR(ierr);
1021 template <
typename T>
1028 auto ierr = MatGetLocalSize (_mat, &m, NULL);
1029 LIBMESH_CHKERR(ierr);
1034 template <
typename T>
1039 PetscInt petsc_m=0, petsc_n=0;
1040 PetscErrorCode ierr=0;
1042 ierr = MatGetSize (_mat, &petsc_m, &petsc_n);
1043 LIBMESH_CHKERR(ierr);
1048 template <
typename T>
1055 auto ierr = MatGetLocalSize (_mat, NULL, &n);
1056 LIBMESH_CHKERR(ierr);
1061 template <
typename T>
1067 PetscInt petsc_m = 0, petsc_n = 0;
1069 auto ierr = MatGetLocalSize (_mat, &petsc_m, &petsc_n);
1070 LIBMESH_CHKERR(ierr);
1076 template <
typename T>
1081 PetscInt start=0,
stop=0;
1082 PetscErrorCode ierr=0;
1084 ierr = MatGetOwnershipRange(_mat, &start, &
stop);
1085 LIBMESH_CHKERR(ierr);
1092 template <
typename T>
1097 PetscInt start=0,
stop=0;
1098 PetscErrorCode ierr=0;
1100 ierr = MatGetOwnershipRange(_mat, &start, &
stop);
1101 LIBMESH_CHKERR(ierr);
1108 template <
typename T>
1115 PetscErrorCode ierr=0;
1116 PetscInt i_val=i, j_val=j;
1118 PetscScalar petsc_value =
static_cast<PetscScalar
>(value);
1119 ierr = MatSetValues(_mat, 1, &i_val, 1, &j_val,
1120 &petsc_value, INSERT_VALUES);
1121 LIBMESH_CHKERR(ierr);
1126 template <
typename T>
1133 PetscErrorCode ierr=0;
1134 PetscInt i_val=i, j_val=j;
1136 PetscScalar petsc_value =
static_cast<PetscScalar
>(value);
1137 ierr = MatSetValues(_mat, 1, &i_val, 1, &j_val,
1138 &petsc_value, ADD_VALUES);
1139 LIBMESH_CHKERR(ierr);
1144 template <
typename T>
1146 const std::vector<numeric_index_type> & dof_indices)
1148 this->add_matrix (dm, dof_indices, dof_indices);
1157 template <
typename T>
1164 libmesh_assert_equal_to (this->m(), X_in.
m());
1165 libmesh_assert_equal_to (this->n(), X_in.
n());
1167 PetscScalar a =
static_cast<PetscScalar
> (a_in);
1168 const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1172 PetscErrorCode ierr=0;
1175 libmesh_assert(X->
closed());
1177 semiparallel_only();
1179 ierr = MatAXPY(_mat, a, X->
_mat, DIFFERENT_NONZERO_PATTERN);
1180 LIBMESH_CHKERR(ierr);
1184 template <
typename T>
1191 libmesh_assert_equal_to (this->n(), X_in.
m());
1193 const PetscMatrix<T> * X = cast_ptr<const PetscMatrix<T> *> (&X_in);
1196 PetscErrorCode ierr = 0;
1199 libmesh_assert(X->closed());
1201 semiparallel_only();
1205 ierr = MatMatMult(_mat, X->_mat, MAT_REUSE_MATRIX, PETSC_DEFAULT, &Y->_mat);
1206 LIBMESH_CHKERR(ierr);
1211 ierr = MatMatMult(_mat, X->_mat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Y->_mat);
1212 LIBMESH_CHKERR(ierr);
1216 Y->_is_initialized =
true;
1219 template <
typename T>
1222 const std::map<numeric_index_type, numeric_index_type> & row_ltog,
1223 const std::map<numeric_index_type, numeric_index_type> & col_ltog,
1228 libmesh_assert_greater_equal(spm.
m(), row_ltog.size());
1229 libmesh_assert_greater_equal(spm.
n(), col_ltog.size());
1232 libmesh_assert_greater_equal(this->m(), spm.
m());
1233 libmesh_assert_greater_equal(this->n(), spm.
n());
1238 PetscErrorCode ierr = 0;
1240 auto pscm = cast_ptr<const PetscMatrix<T> *>(&spm);
1244 const PetscInt * lcols;
1245 const PetscScalar * vals;
1247 std::vector<PetscInt> gcols;
1248 std::vector<PetscScalar> values;
1250 for (
auto ltog : row_ltog)
1252 PetscInt grow[] = {
static_cast<PetscInt
>(ltog.second)};
1254 ierr = MatGetRow(pscm->_mat, static_cast<PetscInt>(ltog.first), &ncols, &lcols, &vals);
1255 LIBMESH_CHKERR(ierr);
1258 gcols.resize(ncols);
1259 values.resize(ncols);
1260 for (
auto i : index_range(gcols))
1262 gcols[i] = libmesh_map_find(col_ltog, lcols[i]);
1263 values[i] =
PS(scalar) * vals[i];
1266 ierr = MatSetValues(_mat, 1, grow, ncols, gcols.data(), values.data(), ADD_VALUES);
1267 LIBMESH_CHKERR(ierr);
1268 ierr = MatRestoreRow(pscm->_mat, static_cast<PetscInt>(ltog.first), &ncols, &lcols, &vals);
1269 LIBMESH_CHKERR(ierr);
1275 template <
typename T>
1282 const PetscScalar * petsc_row;
1283 const PetscInt * petsc_cols;
1292 i_val=
static_cast<PetscInt
>(i_in),
1293 j_val=static_cast<PetscInt>(j_in);
1300 libmesh_assert(this->
closed());
1302 ierr = MatGetRow(_mat, i_val, &ncols, &petsc_cols, &petsc_row);
1303 LIBMESH_CHKERR(ierr);
1307 std::pair<const PetscInt *, const PetscInt *> p =
1308 std::equal_range (petsc_cols, petsc_cols + ncols, j_val);
1311 if (p.first != p.second)
1315 const std::size_t j =
1316 std::distance (const_cast<PetscInt *>(petsc_cols),
1317 const_cast<PetscInt *>(p.first));
1319 libmesh_assert_less (j, ncols);
1320 libmesh_assert_equal_to (petsc_cols[j], j_val);
1322 value =
static_cast<T
> (petsc_row[j]);
1325 ierr = MatRestoreRow(_mat, i_val,
1326 &ncols, &petsc_cols, &petsc_row);
1327 LIBMESH_CHKERR(ierr);
1332 template <
typename T>
1334 std::vector<numeric_index_type> & indices,
1335 std::vector<T> & values)
const
1339 const PetscScalar * petsc_row;
1340 const PetscInt * petsc_cols;
1342 PetscErrorCode ierr=0;
1345 i_val =
static_cast<PetscInt
>(i_in);
1351 libmesh_assert(this->
closed());
1363 #ifdef LIBMESH_HAVE_CXX11_THREAD
1364 std::lock_guard<std::mutex>
1366 Threads::spin_mutex::scoped_lock
1368 lock(_petsc_matrix_mutex);
1370 ierr = MatGetRow(_mat, i_val, &ncols, &petsc_cols, &petsc_row);
1371 LIBMESH_CHKERR(ierr);
1374 indices.resize(static_cast<std::size_t>(ncols));
1375 values.resize(static_cast<std::size_t>(ncols));
1377 for (
auto i : index_range(indices))
1380 values[i] =
static_cast<T
>(petsc_row[i]);
1383 ierr = MatRestoreRow(_mat, i_val,
1384 &ncols, &petsc_cols, &petsc_row);
1385 LIBMESH_CHKERR(ierr);
// PetscMatrix<T>::closed() (body fragment; signature line lost in
// extraction): reports whether all pending insert/add operations have been
// flushed, by querying PETSc's own assembly state with MatAssembled().
template <
typename T>
1394 PetscErrorCode ierr=0;
1395 PetscBool assembled;
// Ask PETSc whether MatAssemblyBegin/End has completed on _mat.
1397 ierr = MatAssembled(_mat, &assembled);
1398 LIBMESH_CHKERR(ierr);
// Convert the PetscBool result to a C++ bool for the SparseMatrix interface.
1400 return (assembled == PETSC_TRUE);
// PetscMatrix<T>::set_destroy_mat_on_exit() (body fragment): records whether
// the destructor should call MatDestroy on _mat (true) or leave ownership of
// the Mat with PETSc / the caller (false).
template <
typename T>
1406 this->_destroy_mat_on_exit = destroy;
// PetscMatrix<T>::swap() (body fragment): exchanges only the internal Mat
// handles of the two PetscMatrix objects; no matrix values are copied or
// moved.  NOTE(review): other members (e.g. _destroy_mat_on_exit) are not
// visibly swapped here — confirm against the full source.
template <
typename T>
1413 std::swap(_mat, m_in.
_mat);
1426 #endif // #ifdef LIBMESH_HAVE_PETSC
bool closed()
Checks that the library has been closed.
void reset_preallocation()
重置矩阵以使用用户提供的原始非零模式。
virtual T operator()(const numeric_index_type i, const numeric_index_type j) const override
获取矩阵中特定位置的值。
该类提供了一个良好的接口,用于访问 PETSc 的 Vec 对象。所有重写的虚拟函数都在 numeric_vector.h 中有文档说明。
void set_matrix_type(PetscMatrixType mat_type)
设置矩阵的类型(如稀疏矩阵类型)。
virtual void print_matlab(const std::string &name="") const override
将矩阵内容以 MATLAB 格式打印出来。
unsigned int n() const
返回矩阵的列维度。
bool _destroy_mat_on_exit
此布尔值仅应在接受 PETSc Mat 对象的构造函数中设置为 false。
virtual numeric_index_type local_m() const final
获取由该进程拥有的矩阵行数。
virtual void zero() override
将矩阵所有元素置为零。
PetscMatrix(const Parallel::Communicator &comm_in)
构造函数:将矩阵初始化为空,没有任何结构。这个构造函数只适用于类的成员矩阵。 所有其他矩阵应该在所有必要信息都可用的数据流中的某一点创建。
virtual void init(const numeric_index_type m, const numeric_index_type n, const numeric_index_type m_l, const numeric_index_type n_l, const numeric_index_type nnz=30, const numeric_index_type noz=10, const numeric_index_type blocksize=1) override
初始化 PETSc 矩阵。
virtual void clear() noexcept override
clear() 在析构函数中调用,因此它不应该抛出异常。
void get_local_size(numeric_index_type &m, numeric_index_type &n) const
获取由该进程拥有的矩阵行数和列数。
virtual numeric_index_type n() const override
获取矩阵的列数。
void update_preallocation_and_zero()
更新基于 dof_map 的稀疏模式,并将矩阵置零。这在稀疏模式在计算过程中发生变化的情况下很有用。
virtual void add_block_matrix(const DenseMatrix< T > &dm, const std::vector< numeric_index_type > &brows, const std::vector< numeric_index_type > &bcols) override
将稠密块矩阵添加到此稀疏矩阵的指定块行和块列。
PetscScalar * pPS(T *ptr)
virtual void print_personal(std::ostream &os=libMesh::out) const override
使用 PETSc 查看器将矩阵的内容打印到屏幕。 由于我们限制了一个 PETSc 实现,所以此函数仅允许打印到标准输出。
virtual std::unique_ptr< SparseMatrix< T > > zero_clone() const override
创建一个新的空矩阵,并将其所有元素置为零。
void swap(PetscMatrix< T > &)
交换两个 PetscMatrix 的内部数据指针,不交换实际值。
numeric_index_type local_n() const
获取由该进程拥有的矩阵列数。
virtual void add_sparse_matrix(const SparseMatrix< T > &spm, const std::map< numeric_index_type, numeric_index_type > &row_ltog, const std::map< numeric_index_type, numeric_index_type > &col_ltog, const T scalar) override
将标量乘以稀疏矩阵 spm 添加到此矩阵的指定行和列。
Vec vec()
获取当前向量的原始 PETSc Vec 指针。
virtual std::unique_ptr< SparseMatrix< T > > clone() const override
克隆当前矩阵。
virtual void clear()=0
恢复 SparseMatrix<T> 到原始状态。
PetscInt * numeric_petsc_cast(const numeric_index_type *p)
virtual bool initialized() const
这是一个通用的稀疏矩阵类。该类包含了必须在派生类中覆盖的纯虚拟成员。 使用一个公共的基类允许从不同的求解器包中以不同的格式统一访问稀疏矩阵。
virtual ~PetscMatrix()
析构函数,释放矩阵资源。
void libmesh_ignore(const Args &...)
dof_id_type numeric_index_type
bool _is_initialized
Flag that tells if init() has been called.
unsigned int m() const
返回矩阵的行维度。
bool _is_initialized
标志,指示矩阵是否已初始化。
virtual numeric_index_type m() const =0
T * get()
获取托管对象的指针。 这用于模拟以下代码: KSP ksp; KSPCreate(comm, &ksp); 因为在这种上下文中,取包装对象的地址是没有意义的。
void stop(const char *file, int line, const char *date, const char *time)
virtual numeric_index_type row_stop() const override
获取本地处理的行的结束索引。
virtual void get_row(numeric_index_type i, std::vector< numeric_index_type > &indices, std::vector< T > &values) const override
获取矩阵中指定行的索引和值。
virtual void create_submatrix_nosort(SparseMatrix< T > &submatrix, const std::vector< numeric_index_type > &rows, const std::vector< numeric_index_type > &cols) const override
根据给定的 rows 和 cols 向量中的索引创建一个子矩阵,类似于 create_submatrix 函数。
std::vector< T > & get_values()
返回对应于存储向量的引用的底层数据存储矢量。
virtual void set_destroy_mat_on_exit(bool destroy=true)
设置析构时是否删除 Mat 对象。如果设置为 false,则允许 PETSc 管理它。
virtual numeric_index_type m() const override
获取矩阵的行数。
virtual void matrix_matrix_mult(SparseMatrix< T > &X, SparseMatrix< T > &Y, bool reuse=false) override
计算 Y = A*X,其中 A 是当前矩阵,X 是另一个矩阵。
virtual void add(const numeric_index_type i, const numeric_index_type j, const T value) override
在矩阵的特定位置添加值。
virtual void get_diagonal(NumericVector< T > &dest) const override
获取矩阵的对角线并将其存储在给定的向量中。
DIE A HORRIBLE DEATH HERE typedef LIBMESH_DEFAULT_SCALAR_TYPE Real
virtual bool closed() const override
检查矩阵是否已经关闭,即所有的插入和添加操作是否已经完成。
virtual numeric_index_type row_start() const override
获取本地处理的行的起始索引。
virtual void get_transpose(SparseMatrix< T > &dest) const override
获取矩阵的转置。
virtual Real l1_norm() const override
计算矩阵的 L1 范数。
virtual void add_matrix(const DenseMatrix< T > &dm, const std::vector< numeric_index_type > &rows, const std::vector< numeric_index_type > &cols) override
将稠密矩阵添加到此稀疏矩阵的指定行和列。
virtual void set(const numeric_index_type i, const numeric_index_type j, const T value) override
设置矩阵中特定位置的值。
这个类提供了一个方便的接口,用于操作并行稀疏矩阵的 PETSc C 库数据结构。 所有覆盖的虚拟函数都在 sparse_matrix.h 中有详细的文档说明。
bool initialized()
Checks that library initialization has been done.
Mat _mat
用于存储值的 PETSc 矩阵数据类型。
virtual void _get_submatrix(SparseMatrix< T > &submatrix, const std::vector< numeric_index_type > &rows, const std::vector< numeric_index_type > &cols, const bool reuse_submatrix) const override
创建或重新初始化一个由 rows 和 cols 向量中给出的索引定义的矩阵 submatrix。
virtual void zero_rows(std::vector< numeric_index_type > &rows, T diag_value=0.0) override
将指定行的所有元素置为零,并在对角线位置放置一个指定值。
定义用于有限元类型计算的密集矩阵。 用于在求和成全局矩阵之前存储单元刚度矩阵。所有被覆盖的虚函数都记录在dense_matrix_base.h中。
virtual void flush() override
刷新矩阵的内部缓冲区,确保所有操作都已完成。
virtual void close() override
完成任何挂起的插入或添加操作。
virtual Real linfty_norm() const override
计算矩阵的无穷范数。
virtual numeric_index_type n() const =0