#include "libmesh/petsc_vector.h"

#ifdef LIBMESH_HAVE_PETSC

#include "libmesh/petsc_matrix.h"
#include "libmesh/dense_subvector.h"
#include "libmesh/dense_vector.h"
#include "libmesh/int_range.h"
#include "libmesh/petsc_macro.h"
#include "libmesh/wrapped_petsc.h"

#include "timpi/op_function.h"
#include "timpi/parallel_implementation.h"
#include "timpi/standard_type.h"
// PetscVector<T>::sum()
this->_restore_array();
libmesh_assert(this->closed());

PetscErrorCode ierr=0;

ierr = VecSum (_vec, &value);

return static_cast<T>(value);

// PetscVector<T>::l1_norm()
this->_restore_array();
libmesh_assert(this->closed());

PetscErrorCode ierr=0;

ierr = VecNorm (_vec, NORM_1, &value);

return static_cast<Real>(value);

// PetscVector<T>::l2_norm()
parallel_object_only();

this->_restore_array();
libmesh_assert(this->closed());

PetscErrorCode ierr=0;

ierr = VecNorm (_vec, NORM_2, &value);

return static_cast<Real>(value);

// PetscVector<T>::linfty_norm()
parallel_object_only();

this->_restore_array();
libmesh_assert(this->closed());

PetscErrorCode ierr=0;

ierr = VecNorm (_vec, NORM_INFINITY, &value);
LIBMESH_CHKERR(ierr);

return static_cast<Real>(value);
template <typename T>
// NumericVector<T> & PetscVector<T>::operator += (const NumericVector<T> & v)
parallel_object_only();

this->_restore_array();
libmesh_assert(this->closed());

template <typename T>
// NumericVector<T> & PetscVector<T>::operator -= (const NumericVector<T> & v)
parallel_object_only();

this->_restore_array();
libmesh_assert(this->closed());
template <typename T>
// void PetscVector<T>::set (const numeric_index_type i, const T value)
this->_restore_array();
libmesh_assert_less (i, size());

PetscErrorCode ierr=0;
PetscInt i_val = static_cast<PetscInt>(i);
PetscScalar petsc_value = PS(value);

std::scoped_lock lock(this->_numeric_vector_mutex);
ierr = VecSetValues (_vec, 1, &i_val, &petsc_value, INSERT_VALUES);
LIBMESH_CHKERR(ierr);

this->_is_closed = false;

template <typename T>
// void PetscVector<T>::reciprocal ()
parallel_object_only();

PetscErrorCode ierr = 0;

ierr = VecReciprocal(_vec);
LIBMESH_CHKERR(ierr);

template <typename T>
// void PetscVector<T>::conjugate ()
parallel_object_only();

PetscErrorCode ierr = 0;

ierr = VecConjugate(_vec);
LIBMESH_CHKERR(ierr);

template <typename T>
// void PetscVector<T>::add (const numeric_index_type i, const T value)
this->_restore_array();
libmesh_assert_less (i, size());

PetscErrorCode ierr=0;
PetscInt i_val = static_cast<PetscInt>(i);
PetscScalar petsc_value = PS(value);

std::scoped_lock lock(this->_numeric_vector_mutex);
ierr = VecSetValues (_vec, 1, &i_val, &petsc_value, ADD_VALUES);
LIBMESH_CHKERR(ierr);

this->_is_closed = false;
template <typename T>
void PetscVector<T>::add_vector (const T * v,
                                 const std::vector<numeric_index_type> & dof_indices)

if (dof_indices.empty())
  return;

this->_restore_array();

PetscErrorCode ierr=0;
const PetscInt * i_val = reinterpret_cast<const PetscInt *>(dof_indices.data());
const PetscScalar * petsc_value = pPS(v);

std::scoped_lock lock(this->_numeric_vector_mutex);
ierr = VecSetValues (_vec, cast_int<PetscInt>(dof_indices.size()),
                     i_val, petsc_value, ADD_VALUES);
LIBMESH_CHKERR(ierr);

this->_is_closed = false;
template <typename T>
// void PetscVector<T>::add_vector (const NumericVector<T> & v_in, const SparseMatrix<T> & A_in)
parallel_object_only();

this->_restore_array();

const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
const PetscMatrix<T> * A = cast_ptr<const PetscMatrix<T> *>(&A_in);

PetscErrorCode ierr=0;

libmesh_warning("Matrix A must be assembled before calling PetscVector::add_vector(v, A).\n"
                "Please update your code, as this warning will become an error in a future release.");
libmesh_deprecated();

LIBMESH_CHKERR(ierr);

template <typename T>
// void PetscVector<T>::add_vector_transpose (const NumericVector<T> & v_in, const SparseMatrix<T> & A_in)
parallel_object_only();

this->_restore_array();

const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
const PetscMatrix<T> * A = cast_ptr<const PetscMatrix<T> *>(&A_in);

PetscErrorCode ierr=0;

libmesh_warning("Matrix A must be assembled before calling PetscVector::add_vector_transpose(v, A).\n"
                "Please update your code, as this warning will become an error in a future release.");
libmesh_deprecated();

ierr = MatMultTransposeAdd(const_cast<PetscMatrix<T> *>(A)->mat(), v->_vec, _vec, _vec);
LIBMESH_CHKERR(ierr);

template <typename T>
// void PetscVector<T>::add_vector_conjugate_transpose (const NumericVector<T> & v_in, const SparseMatrix<T> & A_in)
parallel_object_only();

this->_restore_array();

const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);
const PetscMatrix<T> * A = cast_ptr<const PetscMatrix<T> *>(&A_in);

libmesh_warning("Matrix A must be assembled before calling PetscVector::add_vector_conjugate_transpose(v, A).\n"
                "Please update your code, as this warning will become an error in a future release.");
libmesh_deprecated();

std::unique_ptr<NumericVector<Number>> this_clone = this->clone();

PetscErrorCode ierr = MatMultHermitianTranspose(const_cast<PetscMatrix<T> *>(A)->mat(), v->_vec, _vec);
LIBMESH_CHKERR(ierr);

this->add(1., *this_clone);
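// Usage note (illustrative, not part of the original source): the three
// matrix-vector add_vector() overloads above expect an assembled matrix,
// e.g.
//
//   A.close();                      // assemble the SparseMatrix first
//   b.add_vector(x, A);             // b += A x
//   b.add_vector_transpose(x, A);   // b += A^T x
//
// Passing an unassembled matrix currently only triggers the deprecation
// warning shown above, but will become an error in a future release.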
template <typename T>
// void PetscVector<T>::add (const T v_in)
this->_get_array(false);

_values[i] += PetscScalar(v_in);

template <typename T>
// void PetscVector<T>::add (const NumericVector<T> & v)
parallel_object_only();

template <typename T>
// void PetscVector<T>::add (const T a_in, const NumericVector<T> & v_in)
parallel_object_only();

this->_restore_array();

PetscScalar a = PS(a_in);

const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);

libmesh_assert_equal_to (this->size(), v->size());

PetscErrorCode ierr = VecAXPY(_vec, a, v->vec());
LIBMESH_CHKERR(ierr);

libmesh_assert(this->comm().verify(int(this->type())));

if (this->type() == GHOSTED)
  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);

this->_is_closed = true;
template <typename T>
void PetscVector<T>::insert (const T * v,
                             const std::vector<numeric_index_type> & dof_indices)

if (dof_indices.empty())
  return;

this->_restore_array();

PetscErrorCode ierr=0;

std::scoped_lock lock(this->_numeric_vector_mutex);
ierr = VecSetValues (_vec, cast_int<PetscInt>(dof_indices.size()),
                     idx_values, pPS(v), INSERT_VALUES);
LIBMESH_CHKERR(ierr);

this->_is_closed = false;

template <typename T>
// void PetscVector<T>::scale (const T factor_in)
parallel_object_only();

this->_restore_array();

PetscScalar factor = PS(factor_in);

PetscErrorCode ierr = VecScale(_vec, factor);
LIBMESH_CHKERR(ierr);

libmesh_assert(this->comm().verify(int(this->type())));

if (this->type() == GHOSTED)
  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);

template <typename T>
// NumericVector<T> & PetscVector<T>::operator *= (const NumericVector<T> & v)
parallel_object_only();

PetscErrorCode ierr = 0;

const PetscVector<T> * v_vec = cast_ptr<const PetscVector<T> *>(&v);

ierr = VecPointwiseMult(_vec, _vec, v_vec->_vec);
LIBMESH_CHKERR(ierr);

template <typename T>
// NumericVector<T> & PetscVector<T>::operator /= (const NumericVector<T> & v)
parallel_object_only();

PetscErrorCode ierr = 0;

const PetscVector<T> * v_vec = cast_ptr<const PetscVector<T> *>(&v);

ierr = VecPointwiseDivide(_vec, _vec, v_vec->_vec);
LIBMESH_CHKERR(ierr);

template <typename T>
// void PetscVector<T>::abs ()
parallel_object_only();

this->_restore_array();

PetscErrorCode ierr = VecAbs(_vec);
LIBMESH_CHKERR(ierr);

libmesh_assert(this->comm().verify(int(this->type())));

if (this->type() == GHOSTED)
  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
template <typename T>
// T PetscVector<T>::dot (const NumericVector<T> & v_in) const
parallel_object_only();

this->_restore_array();

PetscErrorCode ierr = 0;

PetscScalar value=0.;

const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);

ierr = VecDot(this->_vec, v->_vec, &value);
LIBMESH_CHKERR(ierr);

return static_cast<T>(value);

template <typename T>
// T PetscVector<T>::indefinite_dot (const NumericVector<T> & v_in) const
parallel_object_only();

this->_restore_array();

PetscErrorCode ierr = 0;

PetscScalar value=0.;

const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);

ierr = VecTDot(this->_vec, v->_vec, &value);
LIBMESH_CHKERR(ierr);

return static_cast<T>(value);
template <typename T>
// NumericVector<T> & PetscVector<T>::operator = (const T s_in)
parallel_object_only();

this->_restore_array();
libmesh_assert(this->closed());

PetscScalar s = PS(s_in);

if (this->size() != 0)

    PetscErrorCode ierr = VecSet(_vec, s);
    LIBMESH_CHKERR(ierr);

    libmesh_assert(this->comm().verify(int(this->type())));

    if (this->type() == GHOSTED)
      VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);

template <typename T>
// NumericVector<T> & PetscVector<T>::operator = (const NumericVector<T> & v_in)
parallel_object_only();

const PetscVector<T> * v = cast_ptr<const PetscVector<T> *>(&v_in);

template <typename T>
// PetscVector<T> & PetscVector<T>::operator = (const PetscVector<T> & v)
parallel_object_only();

this->_restore_array();

libmesh_assert_equal_to (this->size(), v.size());
libmesh_assert_equal_to (this->local_size(), v.local_size());
libmesh_assert (v.closed());

PetscErrorCode ierr = 0;

ierr = VecCopy (v._vec, this->_vec);
LIBMESH_CHKERR(ierr);

libmesh_assert(this->comm().verify(int(this->type())));

if (this->type() == GHOSTED)
  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);

this->_is_closed = true;

template <typename T>
// NumericVector<T> & PetscVector<T>::operator = (const std::vector<T> & v)
parallel_object_only();

this->_get_array(false);

if (this->size() == v.size())

    _values[i] = PS(v[first + i]);

    _values[i] = PS(v[i]);

if (this->type() == GHOSTED)
template <typename T>
// void PetscVector<T>::localize (NumericVector<T> & v_local_in) const
parallel_object_only();

this->_restore_array();

PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);

libmesh_assert(v_local);
libmesh_assert(v_local->closed());
libmesh_assert_equal_to (v_local->size(), this->size());

libmesh_assert(this->size()==v_local->local_size() || this->local_size()==v_local->local_size());

PetscErrorCode ierr = 0;

if (v_local->type() == SERIAL && this->size() == v_local->local_size())

    ierr = VecScatterCreateToAll(_vec, scatter.get(), nullptr);
    LIBMESH_CHKERR(ierr);
    VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);

else if (this->local_size() == v_local->local_size())

    ierr = VecCopy(_vec,v_local->_vec);
    LIBMESH_CHKERR(ierr);

else
    libmesh_error_msg("Vectors are inconsistent");

if (v_local->type() == GHOSTED)
  VecGhostUpdateBeginEnd(this->comm(), v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
template <typename T>
void PetscVector<T>::localize (NumericVector<T> & v_local_in,
                               const std::vector<numeric_index_type> & send_list) const

parallel_object_only();

libmesh_assert(this->comm().verify(int(this->type())));
libmesh_assert(this->comm().verify(int(v_local_in.type())));

if (v_local_in.type() == GHOSTED &&
    this->type() == PARALLEL)

this->_restore_array();

PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);

libmesh_assert(v_local);
libmesh_assert_equal_to (v_local->size(), this->size());
libmesh_assert_less_equal (send_list.size(), v_local->size());

PetscErrorCode ierr=0;
const numeric_index_type n_sl = cast_int<numeric_index_type>(send_list.size());

std::vector<PetscInt> idx(n_sl + this->local_size());

for (auto i : make_range(n_sl))
  idx[i] = static_cast<PetscInt>(send_list[i]);
for (auto i : make_range(this->local_size()))
  idx[n_sl+i] = i + this->first_local_index();

PetscInt * idxptr = idx.empty() ? nullptr : idx.data();
ierr = ISCreateGeneral(this->comm().get(), n_sl+this->local_size(),
                       idxptr, PETSC_USE_POINTER, is.get());
LIBMESH_CHKERR(ierr);

ierr = VecScatterCreate(_vec, is,
LIBMESH_CHKERR(ierr);

VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);

if (v_local->type() == GHOSTED)
template <typename T>
void PetscVector<T>::localize (std::vector<T> & v_local,
                               const std::vector<numeric_index_type> & indices) const

parallel_object_only();

PetscErrorCode ierr = 0;

ierr = VecCreateSeq(PETSC_COMM_SELF, cast_int<PetscInt>(indices.size()), dest.get());
LIBMESH_CHKERR(ierr);

ierr = ISCreateGeneral(this->comm().get(), cast_int<PetscInt>(indices.size()), idxptr,
                       PETSC_USE_POINTER, is.get());
LIBMESH_CHKERR(ierr);

ierr = VecScatterCreate(_vec,
                        LIBMESH_PETSC_NULLPTR,
LIBMESH_CHKERR(ierr);

VecScatterBeginEnd(this->comm(), scatter, _vec, dest, INSERT_VALUES, SCATTER_FORWARD);

PetscScalar * values;
ierr = VecGetArray (dest, &values);
LIBMESH_CHKERR(ierr);

v_local.reserve(indices.size());
v_local.insert(v_local.begin(), values, values+indices.size());

ierr = VecRestoreArray (dest, &values);
LIBMESH_CHKERR(ierr);
template <typename T>
void PetscVector<T>::localize (const numeric_index_type first_local_idx,
                               const numeric_index_type last_local_idx,
                               const std::vector<numeric_index_type> & send_list)

parallel_object_only();

this->_restore_array();

libmesh_assert_less_equal (send_list.size(), this->size());
libmesh_assert_less_equal (last_local_idx+1, this->size());

PetscErrorCode ierr=0;

if (this->n_processors() == 1)

parallel_vec.init (my_size, my_local_size, true, PARALLEL);

std::vector<PetscInt> idx(my_local_size);
std::iota (idx.begin(), idx.end(), first_local_idx);

ierr = ISCreateGeneral(this->comm().get(), my_local_size,
                       my_local_size ? idx.data() : nullptr, PETSC_USE_POINTER, is.get());
LIBMESH_CHKERR(ierr);

ierr = VecScatterCreate(_vec, is,
                        parallel_vec._vec, is,
                        scatter.get());
LIBMESH_CHKERR(ierr);

VecScatterBeginEnd(this->comm(), scatter, _vec, parallel_vec._vec, INSERT_VALUES, SCATTER_FORWARD);

parallel_vec.close();
parallel_vec.localize (*this, send_list);
template <typename T>
// void PetscVector<T>::localize (std::vector<T> & v_local) const
parallel_object_only();

this->_restore_array();

parallel_object_only();

PetscErrorCode ierr=0;
const PetscInt n = this->size();
const PetscInt nl = this->local_size();
PetscScalar * values;

v_local.resize(n, 0.);

ierr = VecGetArray (_vec, &values);
LIBMESH_CHKERR(ierr);

for (PetscInt i=0; i<nl; i++)
  v_local[i+ioff] = static_cast<T>(values[i]);

ierr = VecRestoreArray (_vec, &values);
LIBMESH_CHKERR(ierr);

this->comm().sum(v_local);
#ifdef LIBMESH_USE_REAL_NUMBERS
// Full specialization for Real datatypes
template <>
void PetscVector<Real>::localize_to_one (std::vector<Real> & v_local,
                                         const processor_id_type timpi_mpi_var(pid)) const

parallel_object_only();

this->_restore_array();

PetscErrorCode ierr=0;
const PetscInt n = size();
PetscScalar * values;

if (n_processors() == 1)

    ierr = VecGetArray (_vec, &values);
    LIBMESH_CHKERR(ierr);

    for (PetscInt i=0; i<n; i++)
      v_local[i] = static_cast<Real>(values[i]);

    ierr = VecRestoreArray (_vec, &values);
    LIBMESH_CHKERR(ierr);

#ifdef LIBMESH_HAVE_MPI

    ierr = VecScatterCreateToZero(_vec, ctx.get(), vout.get());
    LIBMESH_CHKERR(ierr);

    VecScatterBeginEnd(this->comm(), ctx, _vec, vout, INSERT_VALUES, SCATTER_FORWARD);

    if (processor_id() == 0)

        ierr = VecGetArray (vout, &values);
        LIBMESH_CHKERR(ierr);

        for (PetscInt i=0; i<n; i++)
          v_local[i] = static_cast<Real>(values[i]);

        ierr = VecRestoreArray (vout, &values);
        LIBMESH_CHKERR(ierr);

    std::vector<Real> local_values (n, 0.);

    ierr = VecGetArray (_vec, &values);
    LIBMESH_CHKERR(ierr);

    const PetscInt nl = local_size();
    for (PetscInt i=0; i<nl; i++)
      local_values[i+ioff] = static_cast<Real>(values[i]);

    ierr = VecRestoreArray (_vec, &values);
    LIBMESH_CHKERR(ierr);

    MPI_Reduce (local_values.data(), v_local.data(), n,
                Parallel::StandardType<Real>(),
                Parallel::OpFunction<Real>::sum(), pid,
                this->comm().get());

#endif // LIBMESH_HAVE_MPI
#ifdef LIBMESH_USE_COMPLEX_NUMBERS
// Full specialization: PetscVector<Complex>::localize_to_one (std::vector<Complex> & v_local,
//                                                             const processor_id_type pid) const
parallel_object_only();

this->_restore_array();

PetscErrorCode ierr=0;
const PetscInt n = size();
const PetscInt nl = local_size();
PetscScalar * values;

for (PetscInt i=0; i<n; i++)

if (n_processors() == 1)

    ierr = VecGetArray (_vec, &values);
    LIBMESH_CHKERR(ierr);

    for (PetscInt i=0; i<n; i++)
      v_local[i] = static_cast<Complex>(values[i]);

    ierr = VecRestoreArray (_vec, &values);
    LIBMESH_CHKERR(ierr);

    std::vector<Real> real_local_values(n, 0.);
    std::vector<Real> imag_local_values(n, 0.);

    ierr = VecGetArray (_vec, &values);
    LIBMESH_CHKERR(ierr);

    for (PetscInt i=0; i<nl; i++)
      {
        real_local_values[i+ioff] = static_cast<Complex>(values[i]).real();
        imag_local_values[i+ioff] = static_cast<Complex>(values[i]).imag();
      }

    ierr = VecRestoreArray (_vec, &values);
    LIBMESH_CHKERR(ierr);

    std::vector<Real> real_v_local(n);
    std::vector<Real> imag_v_local(n);

    MPI_Reduce (real_local_values.data(),
                real_v_local.data(), n,
                Parallel::StandardType<Real>(),
                Parallel::OpFunction<Real>::sum(),
                pid, this->comm().get());

    MPI_Reduce (imag_local_values.data(),
                imag_v_local.data(), n,
                Parallel::StandardType<Real>(),
                Parallel::OpFunction<Real>::sum(),
                pid, this->comm().get());

    for (PetscInt i=0; i<n; i++)
      v_local[i] = Complex(real_v_local[i], imag_v_local[i]);
template <typename T>
// void PetscVector<T>::pointwise_mult (const NumericVector<T> & vec1, const NumericVector<T> & vec2)
parallel_object_only();

this->_restore_array();

const PetscVector<T> * vec1_petsc = cast_ptr<const PetscVector<T> *>(&vec1);
const PetscVector<T> * vec2_petsc = cast_ptr<const PetscVector<T> *>(&vec2);

PetscErrorCode ierr = VecPointwiseMult(_vec, vec1_petsc->vec(), vec2_petsc->vec());
LIBMESH_CHKERR(ierr);

libmesh_assert(this->comm().verify(int(this->type())));

if (this->type() == GHOSTED)
  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);

this->_is_closed = true;

template <typename T>
// void PetscVector<T>::pointwise_divide (const NumericVector<T> & vec1, const NumericVector<T> & vec2)
parallel_object_only();

this->_restore_array();

const PetscVector<T> * const vec1_petsc = cast_ptr<const PetscVector<T> *>(&vec1);
const PetscVector<T> * const vec2_petsc = cast_ptr<const PetscVector<T> *>(&vec2);

PetscErrorCode ierr = VecPointwiseDivide(_vec, vec1_petsc->vec(), vec2_petsc->vec());
LIBMESH_CHKERR(ierr);

libmesh_assert(this->comm().verify(int(this->type())));

if (this->type() == GHOSTED)
  VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);

this->_is_closed = true;
template <typename T>
// void PetscVector<T>::print_matlab (const std::string & name) const
parallel_object_only();

this->_restore_array();
libmesh_assert (this->closed());

PetscErrorCode ierr = 0;

ierr = PetscViewerCreate (this->comm().get(), petsc_viewer.get());
LIBMESH_CHKERR(ierr);

ierr = PetscViewerASCIIOpen( this->comm().get(),
                             name.c_str(),
                             petsc_viewer.get());
LIBMESH_CHKERR(ierr);

#if PETSC_VERSION_LESS_THAN(3,7,0)
ierr = PetscViewerSetFormat (petsc_viewer,
                             PETSC_VIEWER_ASCII_MATLAB);
#else
ierr = PetscViewerPushFormat (petsc_viewer,
                              PETSC_VIEWER_ASCII_MATLAB);
#endif
LIBMESH_CHKERR(ierr);

ierr = VecView (_vec, petsc_viewer);
LIBMESH_CHKERR(ierr);

#if PETSC_VERSION_LESS_THAN(3,7,0)
ierr = PetscViewerSetFormat (PETSC_VIEWER_STDOUT_WORLD,
                             PETSC_VIEWER_ASCII_MATLAB);
#else
ierr = PetscViewerPushFormat (PETSC_VIEWER_STDOUT_WORLD,
                              PETSC_VIEWER_ASCII_MATLAB);
#endif
LIBMESH_CHKERR(ierr);

ierr = VecView (_vec, PETSC_VIEWER_STDOUT_WORLD);
LIBMESH_CHKERR(ierr);
template <typename T>
void PetscVector<T>::create_subvector (NumericVector<T> & subvector,
                                       const std::vector<numeric_index_type> & rows) const

parallel_object_only();

this->_restore_array();

PetscErrorCode ierr = 0;

PetscVector<T> * petsc_subvector = cast_ptr<PetscVector<T> *>(&subvector);

ierr = VecCreateMPI(this->comm().get(),
                    cast_int<PetscInt>(rows.size()),
                    &(petsc_subvector->_vec));
LIBMESH_CHKERR(ierr);

ierr = VecSetFromOptions (petsc_subvector->_vec);
LIBMESH_CHKERR(ierr);

std::vector<PetscInt> idx(rows.size());
std::iota (idx.begin(), idx.end(), 0);

ierr = ISCreateGeneral(this->comm().get(),
                       cast_int<PetscInt>(rows.size()),
LIBMESH_CHKERR(ierr);

ierr = ISCreateGeneral(this->comm().get(),
                       cast_int<PetscInt>(rows.size()),
                       subvector_is.get());
LIBMESH_CHKERR(ierr);

ierr = VecScatterCreate(this->_vec,
                        petsc_subvector->_vec,
                        scatter.get()); LIBMESH_CHKERR(ierr);

VecScatterBeginEnd(this->comm(), scatter, this->_vec, petsc_subvector->_vec, INSERT_VALUES, SCATTER_FORWARD);
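// Usage note (illustrative, not from the original source): create_subvector()
// scatters the entries of *this at the global indices in `rows` into
// `subvector`, creating subvector's PETSc Vec in the process, e.g.
//
//   std::vector<numeric_index_type> rows = {0, 5, 42};
//   PetscVector<Number> sub(full_vec.comm());     // `full_vec` is an illustrative name
//   full_vec.create_subvector(sub, rows);         // sub(i) == full_vec(rows[i])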
template <typename T>
// void PetscVector<T>::_get_array (bool read_only) const
bool initially_array_is_present = _array_is_present.load(std::memory_order_acquire);

if (initially_array_is_present && read_only && !_values_read_only)
  _read_only_values = _values;

if (initially_array_is_present && !read_only && _values_read_only)
    initially_array_is_present = false;

if (!initially_array_is_present)

    std::scoped_lock lock(_petsc_get_restore_array_mutex);
    if (!_array_is_present.load(std::memory_order_relaxed))

        PetscErrorCode ierr=0;
        if (this->type() != GHOSTED)

            // (read_only case)
            ierr = VecGetArrayRead(_vec, &_read_only_values);
            _values_read_only = true;

            // (read-write case)
            ierr = VecGetArray(_vec, &_values);
            _values_read_only = false;

            LIBMESH_CHKERR(ierr);
            _local_size = this->local_size();

            // (GHOSTED case)
            ierr = VecGhostGetLocalForm (_vec,&_local_form);
            LIBMESH_CHKERR(ierr);

            ierr = VecGetArrayRead(_local_form, &_read_only_values);
            _values_read_only = true;

            ierr = VecGetArray(_local_form, &_values);
            _values_read_only = false;

            LIBMESH_CHKERR(ierr);

            PetscInt my_local_size = 0;
            ierr = VecGetLocalSize(_local_form, &my_local_size);
            LIBMESH_CHKERR(ierr);

        PetscInt petsc_first=0, petsc_last=0;
        ierr = VecGetOwnershipRange (_vec, &petsc_first, &petsc_last);
        LIBMESH_CHKERR(ierr);

        _array_is_present.store(true, std::memory_order_release);
template <typename T>
// void PetscVector<T>::_restore_array () const
libmesh_error_msg_if(_values_manually_retrieved,
                     "PetscVector values were manually retrieved but are being automatically restored!");

if (_array_is_present.load(std::memory_order_acquire))

    std::scoped_lock lock(_petsc_get_restore_array_mutex);
    if (_array_is_present.load(std::memory_order_relaxed))

        PetscErrorCode ierr=0;
        if (this->type() != GHOSTED)

            if (_values_read_only)
              ierr = VecRestoreArrayRead (_vec, &_read_only_values);
            else
              ierr = VecRestoreArray (_vec, &_values);

            LIBMESH_CHKERR(ierr);

            // (GHOSTED case)
            if (_values_read_only)
              ierr = VecRestoreArrayRead (_local_form, &_read_only_values);
            else
              ierr = VecRestoreArray (_local_form, &_values);

            LIBMESH_CHKERR(ierr);

            ierr = VecGhostRestoreLocalForm (_vec,&_local_form);
            LIBMESH_CHKERR(ierr);
            _local_form = nullptr;

        _array_is_present.store(false, std::memory_order_release);

#endif // #ifdef LIBMESH_HAVE_PETSC
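The implementation above covers the PetscVector<T> methods that the reference entries below document. As a minimal usage sketch (the demo function, vector name, and sizes are illustrative assumptions, not part of the source above), the basic operations fit together like this, assuming an initialized LibMeshInit whose communicator is passed in:

#include "libmesh/libmesh.h"
#include "libmesh/petsc_vector.h"

using namespace libMesh;

void petsc_vector_demo (const Parallel::Communicator & comm)
{
  // A distributed vector with 100 global entries; PETSc picks the local sizes.
  PetscVector<Number> v(comm, 100, PARALLEL);

  // set()/add() queue values through VecSetValues and leave the vector open;
  // close() assembles it so that sums, norms, and dot products are valid.
  for (numeric_index_type i = v.first_local_index(); i < v.last_local_index(); ++i)
    v.set(i, static_cast<Number>(i));
  v.close();

  const Number s  = v.sum();          // VecSum
  const Real   l1 = v.l1_norm();      // VecNorm, NORM_1
  const Real   l2 = v.l2_norm();      // VecNorm, NORM_2
  const Real   li = v.linfty_norm();  // VecNorm, NORM_INFINITY

  v.scale(2.);                        // VecScale
  const Number d = v.dot(v);          // VecDot

  libmesh_ignore(s, l1, l2, li, d);
}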
void _get_array(bool read_only) const
Queries the array (and the local form if the vector is ghosted) from PETSc.
virtual bool closed() const
Checks whether the vector has been closed and is ready for computation.
virtual Real l2_norm() const override
Computes the L2 norm of the NumericVector.
bool closed()
Checks that the library has been closed.
virtual void localize(std::vector< T > &v_local) const override
Creates a copy of the global vector in the std::vector v_local.
This class provides a nice interface to PETSc's Vec object. All overridden virtual functions are documented in numeric_vector.h. (See the usage sketches before and after this reference list.)
void add_vector_conjugate_transpose(const NumericVector< T > &v, const SparseMatrix< T > &A)
Multiplies the vector v by the conjugate (Hermitian) transpose of the SparseMatrix A and adds the result to this vector.
virtual void add_vector_transpose(const NumericVector< T > &v, const SparseMatrix< T > &A) override
Multiplies the vector v by the transpose of the SparseMatrix A and adds the result to this vector.
boost::multiprecision::float128 real(const boost::multiprecision::float128 in)
virtual numeric_index_type size() const override
Returns the size (dimension) of the NumericVector.
virtual Real linfty_norm() const override
Computes the L-infinity norm of the NumericVector.
virtual numeric_index_type local_size() const override
Returns the local size (local dimension) of the NumericVector.
virtual void add(const numeric_index_type i, const T value) override
Adds value to the entry at index i.
Vec _vec
The actual PETSc vector datatype used to hold the vector entries.
PetscScalar * pPS(T *ptr)
bool _is_initialized
Set to true once init() has been called.
virtual bool initialized() const
Checks whether the vector has been initialized.
virtual void pointwise_divide(const NumericVector< T > &vec1, const NumericVector< T > &vec2) override
Computes the element-wise division of vec1 by vec2 and stores the result in this vector.
Vec vec()
Returns the raw PETSc Vec underlying this vector.
uint8_t processor_id_type
PetscInt * numeric_petsc_cast(const numeric_index_type *p)
This is a generic sparse matrix class. The class contains pure virtual members that must be overridden in derived classes. Using a common base class allows uniform access to sparse matrices stored in different formats by different solver packages.
virtual void insert(const T *v, const std::vector< numeric_index_type > &dof_indices) override
Sets the entries at the specified indices (dof_indices) to the values of the array v.
virtual void pointwise_mult(const NumericVector< T > &vec1, const NumericVector< T > &vec2) override
Computes the element-wise product of vec1 and vec2 and stores the result in this vector.
virtual Real l1_norm() const override
Computes the L1 norm of the NumericVector.
virtual void abs() override
Replaces every entry of this vector by its absolute value.
dof_id_type numeric_index_type
virtual NumericVector< T > & operator+=(const NumericVector< T > &v) override
Adds the NumericVector v to this NumericVector.
void _restore_array() const
Restores the array (and the local form if the vector is ghosted) to PETSc.
virtual void conjugate() override
Replaces every entry of the NumericVector by its complex conjugate.
T * get()
Returns a pointer to the managed object. This is used to emulate code such as: KSP ksp; KSPCreate(comm, &ksp); since in that context it does not make sense to take the address of the wrapper object.
virtual void localize_to_one(std::vector< T > &v_local, const processor_id_type proc_id=0) const override
Creates a copy of the global vector in the std::vector v_local on processor proc_id only.
virtual void create_subvector(NumericVector< T > &subvector, const std::vector< numeric_index_type > &rows) const override
Creates the sub-vector of this vector given by the entries in rows and stores it in subvector.
T indefinite_dot(const NumericVector< T > &v) const
Computes the dot product of (*this) with the vector v, without taking the complex conjugate.
virtual T sum() const override
Computes the sum of the entries of the NumericVector.
std::complex< Real > Complex
virtual void set(const numeric_index_type i, const T value) override
Sets the entry at index i to value.
typedef LIBMESH_DEFAULT_SCALAR_TYPE Real
virtual void init(const numeric_index_type N, const numeric_index_type n_local, const bool fast=false, const ParallelType type=AUTOMATIC) override
Initializes the NumericVector object.
ParallelType type() const
Returns the ParallelType of the vector.
This class provides a convenient interface for manipulating the PETSc C library's data structures for parallel sparse matrices. All overridden virtual functions are documented in sparse_matrix.h.
bool initialized()
Checks that library initialization has been done.
PetscVector< T > & operator=(const PetscVector< T > &v)
Copy assignment operator. Calls VecCopy after performing various checks.
virtual void reciprocal() override
Replaces every entry of the NumericVector by its reciprocal.
virtual void add_vector(const T *v, const std::vector< numeric_index_type > &dof_indices) override
Adds the values of the array v to the entries at the specified indices (dof_indices).
virtual NumericVector< T > & operator*=(const NumericVector< T > &v) override
Multiplies this vector element-wise by the NumericVector v.
boost::multiprecision::float128 imag(const boost::multiprecision::float128)
virtual void print_matlab(const std::string &name="") const override
Prints the contents of this vector in MATLAB format.
virtual void scale(const T factor) override
Scales every entry of this vector by factor.
virtual void close() override
Closes the PetscVector.
virtual NumericVector< T > & operator/=(const NumericVector< T > &v) override
Divides this vector element-wise by the NumericVector v.
virtual NumericVector< T > & operator-=(const NumericVector< T > &v) override
Subtracts the NumericVector v from this NumericVector.
virtual T dot(const NumericVector< T > &v) const override
Computes the dot product of this vector with the vector v.
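For the gather operations documented above (localize() and localize_to_one()), a minimal sketch along the same lines as the previous example (the function and variable names are illustrative):

#include <vector>
#include "libmesh/petsc_vector.h"

using namespace libMesh;

void gather_demo (const PetscVector<Number> & v)
{
  // Every processor receives a full copy of the distributed vector
  // (a VecScatter, or VecGetArray plus a global sum, in the listing above).
  std::vector<Number> everywhere;
  v.localize(everywhere);          // everywhere.size() == v.size() on all ranks

  // Gather the whole vector onto processor 0 only
  // (VecScatterCreateToZero / MPI_Reduce in the specializations above).
  std::vector<Number> on_rank0;
  v.localize_to_one(on_rank0, 0);
}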