Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_SetValuesBatch;
40: PetscLogEvent MAT_ViennaCLCopyToGPU;
41: PetscLogEvent MAT_CUDACopyToGPU;
42: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
43: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
44: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
45: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
46: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
48: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
50: /*@
51: MatSetRandom - Sets all components of a matrix to random numbers.
53: Logically Collective
55: Input Parameters:
56: + x - the matrix
57: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL` and
58: it will create one internally.
60: Example:
61: .vb
62: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
63: MatSetRandom(x,rctx);
64: PetscRandomDestroy(rctx);
65: .ve
67: Level: intermediate
69: Notes:
70: For sparse matrices that have been preallocated but not been assembled it randomly selects appropriate locations,
72: for sparse matrices that already have locations it fills the locations with random numbers.
74: It generates an error if used on sparse matrices that have not been preallocated.
76: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
77: @*/
78: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
79: {
80: PetscRandom randObj = NULL;
82: PetscFunctionBegin;
86: MatCheckPreallocated(x, 1);
88: if (!rctx) {
89: MPI_Comm comm;
90: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
91: PetscCall(PetscRandomCreate(comm, &randObj));
92: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
93: PetscCall(PetscRandomSetFromOptions(randObj));
94: rctx = randObj;
95: }
96: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
97: PetscUseTypeMethod(x, setrandom, rctx);
98: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
100: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
101: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(PetscRandomDestroy(&randObj));
103: PetscFunctionReturn(PETSC_SUCCESS);
104: }
106: /*@
107: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
109: Logically Collective
111: Input Parameter:
112: . mat - the factored matrix
114: Output Parameters:
115: + pivot - the pivot value computed
116: - row - the row that the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
117: the share the matrix
119: Level: advanced
121: Notes:
122: This routine does not work for factorizations done with external packages.
124: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
126: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
128: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
129: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
130: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
131: @*/
132: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
133: {
134: PetscFunctionBegin;
136: PetscAssertPointer(pivot, 2);
137: PetscAssertPointer(row, 3);
138: *pivot = mat->factorerror_zeropivot_value;
139: *row = mat->factorerror_zeropivot_row;
140: PetscFunctionReturn(PETSC_SUCCESS);
141: }
143: /*@
144: MatFactorGetError - gets the error code from a factorization
146: Logically Collective
148: Input Parameter:
149: . mat - the factored matrix
151: Output Parameter:
152: . err - the error code
154: Level: advanced
156: Note:
157: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
159: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
160: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
161: @*/
162: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
163: {
164: PetscFunctionBegin;
166: PetscAssertPointer(err, 2);
167: *err = mat->factorerrortype;
168: PetscFunctionReturn(PETSC_SUCCESS);
169: }
171: /*@
172: MatFactorClearError - clears the error code in a factorization
174: Logically Collective
176: Input Parameter:
177: . mat - the factored matrix
179: Level: developer
181: Note:
182: This can also be called on non-factored matrices that come from, for example, matrices used in SOR.
184: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
185: `MatGetErrorCode()`, `MatFactorError`
186: @*/
187: PetscErrorCode MatFactorClearError(Mat mat)
188: {
189: PetscFunctionBegin;
191: mat->factorerrortype = MAT_FACTOR_NOERROR;
192: mat->factorerror_zeropivot_value = 0.0;
193: mat->factorerror_zeropivot_row = 0;
194: PetscFunctionReturn(PETSC_SUCCESS);
195: }
197: PETSC_INTERN PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
198: {
199: Vec r, l;
200: const PetscScalar *al;
201: PetscInt i, nz, gnz, N, n;
203: PetscFunctionBegin;
204: PetscCall(MatCreateVecs(mat, &r, &l));
205: if (!cols) { /* nonzero rows */
206: PetscCall(MatGetSize(mat, &N, NULL));
207: PetscCall(MatGetLocalSize(mat, &n, NULL));
208: PetscCall(VecSet(l, 0.0));
209: PetscCall(VecSetRandom(r, NULL));
210: PetscCall(MatMult(mat, r, l));
211: PetscCall(VecGetArrayRead(l, &al));
212: } else { /* nonzero columns */
213: PetscCall(MatGetSize(mat, NULL, &N));
214: PetscCall(MatGetLocalSize(mat, NULL, &n));
215: PetscCall(VecSet(r, 0.0));
216: PetscCall(VecSetRandom(l, NULL));
217: PetscCall(MatMultTranspose(mat, l, r));
218: PetscCall(VecGetArrayRead(r, &al));
219: }
220: if (tol <= 0.0) {
221: for (i = 0, nz = 0; i < n; i++)
222: if (al[i] != 0.0) nz++;
223: } else {
224: for (i = 0, nz = 0; i < n; i++)
225: if (PetscAbsScalar(al[i]) > tol) nz++;
226: }
227: PetscCall(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
228: if (gnz != N) {
229: PetscInt *nzr;
230: PetscCall(PetscMalloc1(nz, &nzr));
231: if (nz) {
232: if (tol < 0) {
233: for (i = 0, nz = 0; i < n; i++)
234: if (al[i] != 0.0) nzr[nz++] = i;
235: } else {
236: for (i = 0, nz = 0; i < n; i++)
237: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i;
238: }
239: }
240: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
241: } else *nonzero = NULL;
242: if (!cols) { /* nonzero rows */
243: PetscCall(VecRestoreArrayRead(l, &al));
244: } else {
245: PetscCall(VecRestoreArrayRead(r, &al));
246: }
247: PetscCall(VecDestroy(&l));
248: PetscCall(VecDestroy(&r));
249: PetscFunctionReturn(PETSC_SUCCESS);
250: }
252: /*@
253: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
255: Input Parameter:
256: . mat - the matrix
258: Output Parameter:
259: . keptrows - the rows that are not completely zero
261: Level: intermediate
263: Note:
264: `keptrows` is set to `NULL` if all rows are nonzero.
266: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
267: @*/
268: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
269: {
270: PetscFunctionBegin;
273: PetscAssertPointer(keptrows, 2);
274: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
275: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
276: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
277: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
278: PetscFunctionReturn(PETSC_SUCCESS);
279: }
281: /*@
282: MatFindZeroRows - Locate all rows that are completely zero in the matrix
284: Input Parameter:
285: . mat - the matrix
287: Output Parameter:
288: . zerorows - the rows that are completely zero
290: Level: intermediate
292: Note:
293: `zerorows` is set to `NULL` if no rows are zero.
295: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
296: @*/
297: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
298: {
299: IS keptrows;
300: PetscInt m, n;
302: PetscFunctionBegin;
305: PetscAssertPointer(zerorows, 2);
306: PetscCall(MatFindNonzeroRows(mat, &keptrows));
307: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
308: In keeping with this convention, we set zerorows to NULL if there are no zero
309: rows. */
310: if (keptrows == NULL) {
311: *zerorows = NULL;
312: } else {
313: PetscCall(MatGetOwnershipRange(mat, &m, &n));
314: PetscCall(ISComplement(keptrows, m, n, zerorows));
315: PetscCall(ISDestroy(&keptrows));
316: }
317: PetscFunctionReturn(PETSC_SUCCESS);
318: }
320: /*@
321: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
323: Not Collective
325: Input Parameter:
326: . A - the matrix
328: Output Parameter:
329: . a - the diagonal part (which is a SEQUENTIAL matrix)
331: Level: advanced
333: Notes:
334: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
336: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
338: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
339: @*/
340: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
341: {
342: PetscFunctionBegin;
345: PetscAssertPointer(a, 2);
346: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
347: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
348: else {
349: PetscMPIInt size;
351: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
352: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
353: *a = A;
354: }
355: PetscFunctionReturn(PETSC_SUCCESS);
356: }
358: /*@
359: MatGetTrace - Gets the trace of a matrix. The sum of the diagonal entries.
361: Collective
363: Input Parameter:
364: . mat - the matrix
366: Output Parameter:
367: . trace - the sum of the diagonal entries
369: Level: advanced
371: .seealso: [](ch_matrices), `Mat`
372: @*/
373: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
374: {
375: Vec diag;
377: PetscFunctionBegin;
379: PetscAssertPointer(trace, 2);
380: PetscCall(MatCreateVecs(mat, &diag, NULL));
381: PetscCall(MatGetDiagonal(mat, diag));
382: PetscCall(VecSum(diag, trace));
383: PetscCall(VecDestroy(&diag));
384: PetscFunctionReturn(PETSC_SUCCESS);
385: }
387: /*@
388: MatRealPart - Zeros out the imaginary part of the matrix
390: Logically Collective
392: Input Parameter:
393: . mat - the matrix
395: Level: advanced
397: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
398: @*/
399: PetscErrorCode MatRealPart(Mat mat)
400: {
401: PetscFunctionBegin;
404: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
405: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
406: MatCheckPreallocated(mat, 1);
407: PetscUseTypeMethod(mat, realpart);
408: PetscFunctionReturn(PETSC_SUCCESS);
409: }
411: /*@C
412: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
414: Collective
416: Input Parameter:
417: . mat - the matrix
419: Output Parameters:
420: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each block)
421: - ghosts - the global indices of the ghost points
423: Level: advanced
425: Note:
426: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()`
428: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`
429: @*/
430: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
431: {
432: PetscFunctionBegin;
435: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
436: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
437: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
438: else {
439: if (nghosts) *nghosts = 0;
440: if (ghosts) *ghosts = NULL;
441: }
442: PetscFunctionReturn(PETSC_SUCCESS);
443: }
445: /*@
446: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
448: Logically Collective
450: Input Parameter:
451: . mat - the matrix
453: Level: advanced
455: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
456: @*/
457: PetscErrorCode MatImaginaryPart(Mat mat)
458: {
459: PetscFunctionBegin;
462: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
463: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
464: MatCheckPreallocated(mat, 1);
465: PetscUseTypeMethod(mat, imaginarypart);
466: PetscFunctionReturn(PETSC_SUCCESS);
467: }
469: /*@
470: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices)
472: Not Collective
474: Input Parameter:
475: . mat - the matrix
477: Output Parameters:
478: + missing - is any diagonal missing
479: - dd - first diagonal entry that is missing (optional) on this process
481: Level: advanced
483: .seealso: [](ch_matrices), `Mat`
484: @*/
485: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
486: {
487: PetscFunctionBegin;
490: PetscAssertPointer(missing, 2);
491: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
492: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
493: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
494: PetscFunctionReturn(PETSC_SUCCESS);
495: }
497: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
498: /*@C
499: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
500: for each row that you get to ensure that your application does
501: not bleed memory.
503: Not Collective
505: Input Parameters:
506: + mat - the matrix
507: - row - the row to get
509: Output Parameters:
510: + ncols - if not `NULL`, the number of nonzeros in the row
511: . cols - if not `NULL`, the column numbers
512: - vals - if not `NULL`, the values
514: Level: advanced
516: Notes:
517: This routine is provided for people who need to have direct access
518: to the structure of a matrix. We hope that we provide enough
519: high-level matrix routines that few users will need it.
521: `MatGetRow()` always returns 0-based column indices, regardless of
522: whether the internal representation is 0-based (default) or 1-based.
524: For better efficiency, set cols and/or vals to `NULL` if you do
525: not wish to extract these quantities.
527: The user can only examine the values extracted with `MatGetRow()`;
528: the values cannot be altered. To change the matrix entries, one
529: must use `MatSetValues()`.
531: You can only have one call to `MatGetRow()` outstanding for a particular
532: matrix at a time, per processor. `MatGetRow()` can only obtain rows
533: associated with the given processor, it cannot get rows from the
534: other processors; for that we suggest using `MatCreateSubMatrices()`, then
535: MatGetRow() on the submatrix. The row index passed to `MatGetRow()`
536: is in the global number of rows.
538: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
540: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
542: Fortran Notes:
543: The calling sequence is
544: .vb
545: MatGetRow(matrix,row,ncols,cols,values,ierr)
546: Mat matrix (input)
547: integer row (input)
548: integer ncols (output)
549: integer cols(maxcols) (output)
550: double precision (or double complex) values(maxcols) output
551: .ve
552: where maxcols >= maximum nonzeros in any row of the matrix.
554: Caution:
555: Do not try to change the contents of the output arrays (`cols` and `vals`).
556: In some cases, this may corrupt the matrix.
558: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
559: @*/
560: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
561: {
562: PetscInt incols;
564: PetscFunctionBegin;
567: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
568: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
569: MatCheckPreallocated(mat, 1);
570: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
571: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
572: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
573: if (ncols) *ncols = incols;
574: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
575: PetscFunctionReturn(PETSC_SUCCESS);
576: }
578: /*@
579: MatConjugate - replaces the matrix values with their complex conjugates
581: Logically Collective
583: Input Parameter:
584: . mat - the matrix
586: Level: advanced
588: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
589: @*/
590: PetscErrorCode MatConjugate(Mat mat)
591: {
592: PetscFunctionBegin;
594: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
595: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
596: PetscUseTypeMethod(mat, conjugate);
597: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
598: }
599: PetscFunctionReturn(PETSC_SUCCESS);
600: }
602: /*@C
603: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
605: Not Collective
607: Input Parameters:
608: + mat - the matrix
609: . row - the row to get
610: . ncols - the number of nonzeros
611: . cols - the columns of the nonzeros
612: - vals - if nonzero the column values
614: Level: advanced
616: Notes:
617: This routine should be called after you have finished examining the entries.
619: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
620: us of the array after it has been restored. If you pass `NULL`, it will
621: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
623: Fortran Notes:
624: The calling sequence is
625: .vb
626: MatRestoreRow(matrix,row,ncols,cols,values,ierr)
627: Mat matrix (input)
628: integer row (input)
629: integer ncols (output)
630: integer cols(maxcols) (output)
631: double precision (or double complex) values(maxcols) output
632: .ve
633: Where maxcols >= maximum nonzeros in any row of the matrix.
635: In Fortran `MatRestoreRow()` MUST be called after `MatGetRow()`
636: before another call to `MatGetRow()` can be made.
638: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
639: @*/
640: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
641: {
642: PetscFunctionBegin;
644: if (ncols) PetscAssertPointer(ncols, 3);
645: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
646: if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
647: PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
648: if (ncols) *ncols = 0;
649: if (cols) *cols = NULL;
650: if (vals) *vals = NULL;
651: PetscFunctionReturn(PETSC_SUCCESS);
652: }
654: /*@
655: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
656: You should call `MatRestoreRowUpperTriangular()` after calling` MatGetRow()` and `MatRestoreRow()` to disable the flag.
658: Not Collective
660: Input Parameter:
661: . mat - the matrix
663: Level: advanced
665: Note:
666: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
668: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
669: @*/
670: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
671: {
672: PetscFunctionBegin;
675: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
676: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
677: MatCheckPreallocated(mat, 1);
678: if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
679: PetscUseTypeMethod(mat, getrowuppertriangular);
680: PetscFunctionReturn(PETSC_SUCCESS);
681: }
683: /*@
684: MatRestoreRowUpperTriangular - Disable calls to `MatGetRow()` for matrix in `MATSBAIJ` format.
686: Not Collective
688: Input Parameter:
689: . mat - the matrix
691: Level: advanced
693: Note:
694: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
696: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
697: @*/
698: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
699: {
700: PetscFunctionBegin;
703: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
704: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
705: MatCheckPreallocated(mat, 1);
706: if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
707: PetscUseTypeMethod(mat, restorerowuppertriangular);
708: PetscFunctionReturn(PETSC_SUCCESS);
709: }
711: /*@C
712: MatSetOptionsPrefix - Sets the prefix used for searching for all
713: `Mat` options in the database.
715: Logically Collective
717: Input Parameters:
718: + A - the matrix
719: - prefix - the prefix to prepend to all option names
721: Level: advanced
723: Notes:
724: A hyphen (-) must NOT be given at the beginning of the prefix name.
725: The first character of all runtime options is AUTOMATICALLY the hyphen.
727: This is NOT used for options for the factorization of the matrix. Normally the
728: prefix is automatically passed in from the PC calling the factorization. To set
729: it directly use `MatSetOptionsPrefixFactor()`
731: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
732: @*/
733: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
734: {
735: PetscFunctionBegin;
737: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
738: PetscFunctionReturn(PETSC_SUCCESS);
739: }
741: /*@C
742: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database for
743: for matrices created with `MatGetFactor()`
745: Logically Collective
747: Input Parameters:
748: + A - the matrix
749: - prefix - the prefix to prepend to all option names for the factored matrix
751: Level: developer
753: Notes:
754: A hyphen (-) must NOT be given at the beginning of the prefix name.
755: The first character of all runtime options is AUTOMATICALLY the hyphen.
757: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
758: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
760: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
761: @*/
762: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
763: {
764: PetscFunctionBegin;
766: if (prefix) {
767: PetscAssertPointer(prefix, 2);
768: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
769: if (prefix != A->factorprefix) {
770: PetscCall(PetscFree(A->factorprefix));
771: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
772: }
773: } else PetscCall(PetscFree(A->factorprefix));
774: PetscFunctionReturn(PETSC_SUCCESS);
775: }
777: /*@C
778: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database for
779: for matrices created with `MatGetFactor()`
781: Logically Collective
783: Input Parameters:
784: + A - the matrix
785: - prefix - the prefix to prepend to all option names for the factored matrix
787: Level: developer
789: Notes:
790: A hyphen (-) must NOT be given at the beginning of the prefix name.
791: The first character of all runtime options is AUTOMATICALLY the hyphen.
793: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
794: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
796: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
797: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
798: `MatSetOptionsPrefix()`
799: @*/
800: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
801: {
802: size_t len1, len2, new_len;
804: PetscFunctionBegin;
806: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
807: if (!A->factorprefix) {
808: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
809: PetscFunctionReturn(PETSC_SUCCESS);
810: }
811: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
813: PetscCall(PetscStrlen(A->factorprefix, &len1));
814: PetscCall(PetscStrlen(prefix, &len2));
815: new_len = len1 + len2 + 1;
816: PetscCall(PetscRealloc(new_len * sizeof(*(A->factorprefix)), &A->factorprefix));
817: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
818: PetscFunctionReturn(PETSC_SUCCESS);
819: }
821: /*@C
822: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
823: matrix options in the database.
825: Logically Collective
827: Input Parameters:
828: + A - the matrix
829: - prefix - the prefix to prepend to all option names
831: Level: advanced
833: Note:
834: A hyphen (-) must NOT be given at the beginning of the prefix name.
835: The first character of all runtime options is AUTOMATICALLY the hyphen.
837: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
838: @*/
839: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
840: {
841: PetscFunctionBegin;
843: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
844: PetscFunctionReturn(PETSC_SUCCESS);
845: }
847: /*@C
848: MatGetOptionsPrefix - Gets the prefix used for searching for all
849: matrix options in the database.
851: Not Collective
853: Input Parameter:
854: . A - the matrix
856: Output Parameter:
857: . prefix - pointer to the prefix string used
859: Level: advanced
861: Fortran Notes:
862: The user should pass in a string `prefix` of
863: sufficient length to hold the prefix.
865: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
866: @*/
867: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
868: {
869: PetscFunctionBegin;
871: PetscAssertPointer(prefix, 2);
872: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
873: PetscFunctionReturn(PETSC_SUCCESS);
874: }
876: /*@
877: MatResetPreallocation - Reset matrix to use the original nonzero pattern provided by users.
879: Collective
881: Input Parameter:
882: . A - the matrix
884: Level: beginner
886: Notes:
887: The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
889: Users can reset the preallocation to access the original memory.
891: Currently only supported for `MATAIJ` matrices.
893: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
894: @*/
895: PetscErrorCode MatResetPreallocation(Mat A)
896: {
897: PetscFunctionBegin;
900: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset preallocation after setting some values but not yet calling MatAssemblyBegin()/MatAsssemblyEnd()");
901: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
902: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
903: PetscFunctionReturn(PETSC_SUCCESS);
904: }
906: /*@
907: MatSetUp - Sets up the internal matrix data structures for later use.
909: Collective
911: Input Parameter:
912: . A - the matrix
914: Level: intermediate
916: Notes:
917: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
918: setting values in the matrix.
920: If a suitable preallocation routine is used, this function does not need to be called.
922: This routine is called internally by other matrix functions when needed so rarely needs to be called by users
924: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
925: @*/
926: PetscErrorCode MatSetUp(Mat A)
927: {
928: PetscFunctionBegin;
930: if (!((PetscObject)A)->type_name) {
931: PetscMPIInt size;
933: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
934: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
935: }
936: if (!A->preallocated) PetscTryTypeMethod(A, setup);
937: PetscCall(PetscLayoutSetUp(A->rmap));
938: PetscCall(PetscLayoutSetUp(A->cmap));
939: A->preallocated = PETSC_TRUE;
940: PetscFunctionReturn(PETSC_SUCCESS);
941: }
943: #if defined(PETSC_HAVE_SAWS)
944: #include <petscviewersaws.h>
945: #endif
947: /*
948: If threadsafety is on extraneous matrices may be printed
950: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
951: */
952: #if !defined(PETSC_HAVE_THREADSAFETY)
953: static PetscInt insidematview = 0;
954: #endif
956: /*@C
957: MatViewFromOptions - View properties of the matrix based on options set in the options database
959: Collective
961: Input Parameters:
962: + A - the matrix
963: . obj - optional additional object that provides the options prefix to use
964: - name - command line option
966: Options Database Key:
967: . -mat_view [viewertype]:... - the viewer and its options
969: Level: intermediate
971: Notes:
972: .vb
973: If no value is provided ascii:stdout is used
974: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
975: for example ascii::ascii_info prints just the information about the object not all details
976: unless :append is given filename opens in write mode, overwriting what was already there
977: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
978: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
979: socket[:port] defaults to the standard output port
980: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
981: .ve
983: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
984: @*/
985: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
986: {
987: PetscFunctionBegin;
989: #if !defined(PETSC_HAVE_THREADSAFETY)
990: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
991: #endif
992: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
993: PetscFunctionReturn(PETSC_SUCCESS);
994: }
996: /*@C
997: MatView - display information about a matrix in a variety ways
999: Collective
1001: Input Parameters:
1002: + mat - the matrix
1003: - viewer - visualization context
1005: Options Database Keys:
1006: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1007: . -mat_view ::ascii_info_detail - Prints more detailed info
1008: . -mat_view - Prints matrix in ASCII format
1009: . -mat_view ::ascii_matlab - Prints matrix in Matlab format
1010: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1011: . -display <name> - Sets display name (default is host)
1012: . -draw_pause <sec> - Sets number of seconds to pause after display
1013: . -mat_view socket - Sends matrix to socket, can be accessed from Matlab (see Users-Manual: ch_matlab for details)
1014: . -viewer_socket_machine <machine> - -
1015: . -viewer_socket_port <port> - -
1016: . -mat_view binary - save matrix to file in binary format
1017: - -viewer_binary_filename <name> - -
1019: Level: beginner
1021: Notes:
1022: The available visualization contexts include
1023: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1024: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1025: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1026: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1028: The user can open alternative visualization contexts with
1029: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1030: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1031: specified file; corresponding input uses `MatLoad()`
1032: . `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1033: an X window display
1034: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1035: Currently only the `MATSEQDENSE` and `MATAIJ`
1036: matrix types support the Socket viewer.
1038: The user can call `PetscViewerPushFormat()` to specify the output
1039: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1040: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1041: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1042: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in Matlab format
1043: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1044: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1045: format common among all matrix types
1046: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1047: format (which is in many cases the same as the default)
1048: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1049: size and structure (not the matrix entries)
1050: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1051: the matrix structure
1053: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes,
1054: the program will seemingly hang and take hours for larger matrices, for larger matrices one should use the binary format.
1056: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1058: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1059: viewer is used.
1061: See share/petsc/matlab/PetscBinaryRead.m for a Matlab code that can read in the binary file when the binary
1062: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1064: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1065: and then use the following mouse functions.
1066: .vb
1067: left mouse: zoom in
1068: middle mouse: zoom out
1069: right mouse: continue with the simulation
1070: .ve
1072: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1073: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1074: @*/
1075: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1076: {
1077: PetscInt rows, cols, rbs, cbs;
1078: PetscBool isascii, isstring, issaws;
1079: PetscViewerFormat format;
1080: PetscMPIInt size;
1082: PetscFunctionBegin;
1085: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1087: PetscCheckSameComm(mat, 1, viewer, 2);
1089: PetscCall(PetscViewerGetFormat(viewer, &format));
1090: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
1091: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1093: #if !defined(PETSC_HAVE_THREADSAFETY)
1094: insidematview++;
1095: #endif
1096: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1097: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1098: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1099: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1101: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1102: if (isascii) {
1103: if (!mat->preallocated) {
1104: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1105: #if !defined(PETSC_HAVE_THREADSAFETY)
1106: insidematview--;
1107: #endif
1108: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1109: PetscFunctionReturn(PETSC_SUCCESS);
1110: }
1111: if (!mat->assembled) {
1112: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1113: #if !defined(PETSC_HAVE_THREADSAFETY)
1114: insidematview--;
1115: #endif
1116: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1117: PetscFunctionReturn(PETSC_SUCCESS);
1118: }
1119: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1120: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1121: MatNullSpace nullsp, transnullsp;
1123: PetscCall(PetscViewerASCIIPushTab(viewer));
1124: PetscCall(MatGetSize(mat, &rows, &cols));
1125: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1126: if (rbs != 1 || cbs != 1) {
1127: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "\n", rows, cols, rbs, cbs));
1128: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "\n", rows, cols, rbs));
1129: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1130: if (mat->factortype) {
1131: MatSolverType solver;
1132: PetscCall(MatFactorGetSolverType(mat, &solver));
1133: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1134: }
1135: if (mat->ops->getinfo) {
1136: MatInfo info;
1137: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1138: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1139: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1140: }
1141: PetscCall(MatGetNullSpace(mat, &nullsp));
1142: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1143: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1144: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1145: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1146: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1147: PetscCall(PetscViewerASCIIPushTab(viewer));
1148: PetscCall(MatProductView(mat, viewer));
1149: PetscCall(PetscViewerASCIIPopTab(viewer));
1150: }
1151: } else if (issaws) {
1152: #if defined(PETSC_HAVE_SAWS)
1153: PetscMPIInt rank;
1155: PetscCall(PetscObjectName((PetscObject)mat));
1156: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1157: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1158: #endif
1159: } else if (isstring) {
1160: const char *type;
1161: PetscCall(MatGetType(mat, &type));
1162: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1163: PetscTryTypeMethod(mat, view, viewer);
1164: }
1165: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1166: PetscCall(PetscViewerASCIIPushTab(viewer));
1167: PetscUseTypeMethod(mat, viewnative, viewer);
1168: PetscCall(PetscViewerASCIIPopTab(viewer));
1169: } else if (mat->ops->view) {
1170: PetscCall(PetscViewerASCIIPushTab(viewer));
1171: PetscUseTypeMethod(mat, view, viewer);
1172: PetscCall(PetscViewerASCIIPopTab(viewer));
1173: }
1174: if (isascii) {
1175: PetscCall(PetscViewerGetFormat(viewer, &format));
1176: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1177: }
1178: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1179: #if !defined(PETSC_HAVE_THREADSAFETY)
1180: insidematview--;
1181: #endif
1182: PetscFunctionReturn(PETSC_SUCCESS);
1183: }
1185: #if defined(PETSC_USE_DEBUG)
1186: #include <../src/sys/totalview/tv_data_display.h>
1187: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1188: {
1189: TV_add_row("Local rows", "int", &mat->rmap->n);
1190: TV_add_row("Local columns", "int", &mat->cmap->n);
1191: TV_add_row("Global rows", "int", &mat->rmap->N);
1192: TV_add_row("Global columns", "int", &mat->cmap->N);
1193: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1194: return TV_format_OK;
1195: }
1196: #endif
1198: /*@C
1199: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1200: with `MatView()`. The matrix format is determined from the options database.
1201: Generates a parallel MPI matrix if the communicator has more than one
1202: processor. The default matrix type is `MATAIJ`.
1204: Collective
1206: Input Parameters:
1207: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1208: or some related function before a call to `MatLoad()`
1209: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1211: Options Database Keys:
1212: Used with block matrix formats (`MATSEQBAIJ`, ...) to specify
1213: block size
1214: . -matload_block_size <bs> - set block size
1216: Level: beginner
1218: Notes:
1219: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1220: `Mat` before calling this routine if you wish to set it from the options database.
1222: `MatLoad()` automatically loads into the options database any options
1223: given in the file filename.info where filename is the name of the file
1224: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1225: file will be ignored if you use the -viewer_binary_skip_info option.
1227: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1228: sets the default matrix type AIJ and sets the local and global sizes.
1229: If type and/or size is already set, then the same are used.
1231: In parallel, each processor can load a subset of rows (or the
1232: entire matrix). This routine is especially useful when a large
1233: matrix is stored on disk and only part of it is desired on each
1234: processor. For example, a parallel solver may access only some of
1235: the rows from each processor. The algorithm used here reads
1236: relatively small blocks of data rather than reading the entire
1237: matrix and then subsetting it.
1239: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1240: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1241: or the sequence like
1242: .vb
1243: `PetscViewer` v;
1244: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1245: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1246: `PetscViewerSetFromOptions`(v);
1247: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1248: `PetscViewerFileSetName`(v,"datafile");
1249: .ve
1250: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1251: $ -viewer_type {binary, hdf5}
1253: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1254: and src/mat/tutorials/ex10.c with the second approach.
1256: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1257: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1258: Multiple objects, both matrices and vectors, can be stored within the same file.
1259: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1261: Most users should not need to know the details of the binary storage
1262: format, since `MatLoad()` and `MatView()` completely hide these details.
1263: But for anyone who is interested, the standard binary matrix storage
1264: format is
1266: .vb
1267: PetscInt MAT_FILE_CLASSID
1268: PetscInt number of rows
1269: PetscInt number of columns
1270: PetscInt total number of nonzeros
1271: PetscInt *number nonzeros in each row
1272: PetscInt *column indices of all nonzeros (starting index is zero)
1273: PetscScalar *values of all nonzeros
1274: .ve
1275: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1276: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1277: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1279: PETSc automatically does the byte swapping for
1280: machines that store the bytes reversed. Thus if you write your own binary
1281: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1282: and `PetscBinaryWrite()` to see how this may be done.
1284: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1285: Each processor's chunk is loaded independently by its owning MPI process.
1286: Multiple objects, both matrices and vectors, can be stored within the same file.
1287: They are looked up by their PetscObject name.
1289: As the MATLAB MAT-File Version 7.3 format is also a HDF5 flavor, we decided to use
1290: by default the same structure and naming of the AIJ arrays and column count
1291: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1292: $ save example.mat A b -v7.3
1293: can be directly read by this routine (see Reference 1 for details).
1295: Depending on your MATLAB version, this format might be a default,
1296: otherwise you can set it as default in Preferences.
1298: Unless -nocompression flag is used to save the file in MATLAB,
1299: PETSc must be configured with ZLIB package.
1301: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1303: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1305: Corresponding `MatView()` is not yet implemented.
1307: The loaded matrix is actually a transpose of the original one in MATLAB,
1308: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1309: With this format, matrix is automatically transposed by PETSc,
1310: unless the matrix is marked as SPD or symmetric
1311: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1313: References:
1314: . * - MATLAB(R) Documentation, manual page of save(), https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version
1316: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1317: @*/
1318: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1319: {
1320: PetscBool flg;
1322: PetscFunctionBegin;
1326: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1328: flg = PETSC_FALSE;
1329: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1330: if (flg) {
1331: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1332: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1333: }
1334: flg = PETSC_FALSE;
1335: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1336: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1338: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1339: PetscUseTypeMethod(mat, load, viewer);
1340: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1341: PetscFunctionReturn(PETSC_SUCCESS);
1342: }
1344: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1345: {
1346: Mat_Redundant *redund = *redundant;
1348: PetscFunctionBegin;
1349: if (redund) {
1350: if (redund->matseq) { /* via MatCreateSubMatrices() */
1351: PetscCall(ISDestroy(&redund->isrow));
1352: PetscCall(ISDestroy(&redund->iscol));
1353: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1354: } else {
1355: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1356: PetscCall(PetscFree(redund->sbuf_j));
1357: PetscCall(PetscFree(redund->sbuf_a));
1358: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1359: PetscCall(PetscFree(redund->rbuf_j[i]));
1360: PetscCall(PetscFree(redund->rbuf_a[i]));
1361: }
1362: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1363: }
1365: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1366: PetscCall(PetscFree(redund));
1367: }
1368: PetscFunctionReturn(PETSC_SUCCESS);
1369: }
1371: /*@C
1372: MatDestroy - Frees space taken by a matrix.
1374: Collective
1376: Input Parameter:
1377: . A - the matrix
1379: Level: beginner
1381: Developer Notes:
1382: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1383: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1384: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1385: if changes are needed here.
1387: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1388: @*/
1389: PetscErrorCode MatDestroy(Mat *A)
1390: {
1391: PetscFunctionBegin;
1392: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1394: if (--((PetscObject)(*A))->refct > 0) {
1395: *A = NULL;
1396: PetscFunctionReturn(PETSC_SUCCESS);
1397: }
1399: /* if memory was published with SAWs then destroy it */
1400: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1401: PetscTryTypeMethod((*A), destroy);
1403: PetscCall(PetscFree((*A)->factorprefix));
1404: PetscCall(PetscFree((*A)->defaultvectype));
1405: PetscCall(PetscFree((*A)->defaultrandtype));
1406: PetscCall(PetscFree((*A)->bsizes));
1407: PetscCall(PetscFree((*A)->solvertype));
1408: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1409: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1410: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1411: PetscCall(MatProductClear(*A));
1412: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1413: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1414: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1415: PetscCall(MatDestroy(&(*A)->schur));
1416: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1417: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1418: PetscCall(PetscHeaderDestroy(A));
1419: PetscFunctionReturn(PETSC_SUCCESS);
1420: }
1422: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1423: /*@C
1424: MatSetValues - Inserts or adds a block of values into a matrix.
1425: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1426: MUST be called after all calls to `MatSetValues()` have been completed.
1428: Not Collective
1430: Input Parameters:
1431: + mat - the matrix
1432: . v - a logically two-dimensional array of values
1433: . m - the number of rows
1434: . idxm - the global indices of the rows
1435: . n - the number of columns
1436: . idxn - the global indices of the columns
1437: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1439: Level: beginner
1441: Notes:
1442: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1444: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1445: options cannot be mixed without intervening calls to the assembly
1446: routines.
1448: `MatSetValues()` uses 0-based row and column numbers in Fortran
1449: as well as in C.
1451: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1452: simply ignored. This allows easily inserting element stiffness matrices
1453: with homogeneous Dirichlet boundary conditions that you don't want represented
1454: in the matrix.
1456: Efficiency Alert:
1457: The routine `MatSetValuesBlocked()` may offer much better efficiency
1458: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1460: Developer Notes:
1461: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1462: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1464: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1465: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1466: @*/
1467: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1468: {
1469: PetscFunctionBeginHot;
1472: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1473: PetscAssertPointer(idxm, 3);
1474: PetscAssertPointer(idxn, 5);
1475: MatCheckPreallocated(mat, 1);
1477: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1478: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1480: if (PetscDefined(USE_DEBUG)) {
1481: PetscInt i, j;
1483: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1484: for (i = 0; i < m; i++) {
1485: for (j = 0; j < n; j++) {
1486: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1487: #if defined(PETSC_USE_COMPLEX)
1488: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1489: #else
1490: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1491: #endif
1492: }
1493: }
1494: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1495: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1496: }
1498: if (mat->assembled) {
1499: mat->was_assembled = PETSC_TRUE;
1500: mat->assembled = PETSC_FALSE;
1501: }
1502: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1503: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1504: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1505: PetscFunctionReturn(PETSC_SUCCESS);
1506: }
1508: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1509: /*@C
1510: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1511: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1512: MUST be called after all calls to `MatSetValues()` have been completed.
1514: Not Collective
1516: Input Parameters:
1517: + mat - the matrix
1518: . v - a logically two-dimensional array of values
1519: . ism - the rows to provide
1520: . isn - the columns to provide
1521: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1523: Level: beginner
1525: Notes:
1526: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1528: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1529: options cannot be mixed without intervening calls to the assembly
1530: routines.
1532: `MatSetValues()` uses 0-based row and column numbers in Fortran
1533: as well as in C.
1535: Negative indices may be passed in `ism` and `isn`, these rows and columns are
1536: simply ignored. This allows easily inserting element stiffness matrices
1537: with homogeneous Dirichlet boundary conditions that you don't want represented
1538: in the matrix.
1540: Efficiency Alert:
1541: The routine `MatSetValuesBlocked()` may offer much better efficiency
1542: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1544: This is currently not optimized for any particular `ISType`
1546: Developer Notes:
1547: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1548: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1550: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1551: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1552: @*/
1553: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1554: {
1555: PetscInt m, n;
1556: const PetscInt *rows, *cols;
1558: PetscFunctionBeginHot;
1560: PetscCall(ISGetIndices(ism, &rows));
1561: PetscCall(ISGetIndices(isn, &cols));
1562: PetscCall(ISGetLocalSize(ism, &m));
1563: PetscCall(ISGetLocalSize(isn, &n));
1564: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1565: PetscCall(ISRestoreIndices(ism, &rows));
1566: PetscCall(ISRestoreIndices(isn, &cols));
1567: PetscFunctionReturn(PETSC_SUCCESS);
1568: }
1570: /*@
1571: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1572: values into a matrix
1574: Not Collective
1576: Input Parameters:
1577: + mat - the matrix
1578: . row - the (block) row to set
1579: - v - a logically two-dimensional array of values
1581: Level: intermediate
1583: Notes:
1584: The values, `v`, are column-oriented (for the block version) and sorted
1586: All the nonzeros in the row must be provided
1588: The matrix must have previously had its column indices set, likely by having been assembled.
1590: The row must belong to this process
1592: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1593: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1594: @*/
1595: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1596: {
1597: PetscInt globalrow;
1599: PetscFunctionBegin;
1602: PetscAssertPointer(v, 3);
1603: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1604: PetscCall(MatSetValuesRow(mat, globalrow, v));
1605: PetscFunctionReturn(PETSC_SUCCESS);
1606: }
1608: /*@
1609: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1610: values into a matrix
1612: Not Collective
1614: Input Parameters:
1615: + mat - the matrix
1616: . row - the (block) row to set
1617: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1619: Level: advanced
1621: Notes:
1622: The values, `v`, are column-oriented for the block version.
1624: All the nonzeros in the row must be provided
1626: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually `MatSetValues()` is used.
1628: The row must belong to this process
1630: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1631: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1632: @*/
1633: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1634: {
1635: PetscFunctionBeginHot;
1638: MatCheckPreallocated(mat, 1);
1639: PetscAssertPointer(v, 3);
1640: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1641: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1642: mat->insertmode = INSERT_VALUES;
1644: if (mat->assembled) {
1645: mat->was_assembled = PETSC_TRUE;
1646: mat->assembled = PETSC_FALSE;
1647: }
1648: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1649: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1650: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1651: PetscFunctionReturn(PETSC_SUCCESS);
1652: }
1654: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1655: /*@
1656: MatSetValuesStencil - Inserts or adds a block of values into a matrix.
1657: Using structured grid indexing
1659: Not Collective
1661: Input Parameters:
1662: + mat - the matrix
1663: . m - number of rows being entered
1664: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1665: . n - number of columns being entered
1666: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1667: . v - a logically two-dimensional array of values
1668: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1670: Level: beginner
1672: Notes:
1673: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1675: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1676: options cannot be mixed without intervening calls to the assembly
1677: routines.
1679: The grid coordinates are across the entire grid, not just the local portion
1681: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1682: as well as in C.
1684: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1686: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1687: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1689: The columns and rows in the stencil passed in MUST be contained within the
1690: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1691: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1692: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1693: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1695: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
1696: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
1697: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
1698: `DM_BOUNDARY_PERIODIC` boundary type.
1700: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
1701: a single value per point) you can skip filling those indices.
1703: Inspired by the structured grid interface to the HYPRE package
1704: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1706: Efficiency Alert:
1707: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1708: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1710: Fortran Notes:
1711: `idxm` and `idxn` should be declared as
1712: $ MatStencil idxm(4,m),idxn(4,n)
1713: and the values inserted using
1714: .vb
1715: idxm(MatStencil_i,1) = i
1716: idxm(MatStencil_j,1) = j
1717: idxm(MatStencil_k,1) = k
1718: idxm(MatStencil_c,1) = c
1719: etc
1720: .ve
1722: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1723: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1724: @*/
1725: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1726: {
1727: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1728: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1729: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1731: PetscFunctionBegin;
1732: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1735: PetscAssertPointer(idxm, 3);
1736: PetscAssertPointer(idxn, 5);
1738: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1739: jdxm = buf;
1740: jdxn = buf + m;
1741: } else {
1742: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1743: jdxm = bufm;
1744: jdxn = bufn;
1745: }
1746: for (i = 0; i < m; i++) {
1747: for (j = 0; j < 3 - sdim; j++) dxm++;
1748: tmp = *dxm++ - starts[0];
1749: for (j = 0; j < dim - 1; j++) {
1750: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1751: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1752: }
1753: if (mat->stencil.noc) dxm++;
1754: jdxm[i] = tmp;
1755: }
1756: for (i = 0; i < n; i++) {
1757: for (j = 0; j < 3 - sdim; j++) dxn++;
1758: tmp = *dxn++ - starts[0];
1759: for (j = 0; j < dim - 1; j++) {
1760: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1761: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1762: }
1763: if (mat->stencil.noc) dxn++;
1764: jdxn[i] = tmp;
1765: }
1766: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1767: PetscCall(PetscFree2(bufm, bufn));
1768: PetscFunctionReturn(PETSC_SUCCESS);
1769: }
1771: /*@
1772: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix.
1773: Using structured grid indexing
1775: Not Collective
1777: Input Parameters:
1778: + mat - the matrix
1779: . m - number of rows being entered
1780: . idxm - grid coordinates for matrix rows being entered
1781: . n - number of columns being entered
1782: . idxn - grid coordinates for matrix columns being entered
1783: . v - a logically two-dimensional array of values
1784: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1786: Level: beginner
1788: Notes:
1789: By default the values, `v`, are row-oriented and unsorted.
1790: See `MatSetOption()` for other options.
1792: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1793: options cannot be mixed without intervening calls to the assembly
1794: routines.
1796: The grid coordinates are across the entire grid, not just the local portion
1798: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1799: as well as in C.
1801: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1803: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1804: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1806: The columns and rows in the stencil passed in MUST be contained within the
1807: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1808: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1809: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1810: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1812: Negative indices may be passed in idxm and idxn, these rows and columns are
1813: simply ignored. This allows easily inserting element stiffness matrices
1814: with homogeneous Dirichlet boundary conditions that you don't want represented
1815: in the matrix.
1817: Inspired by the structured grid interface to the HYPRE package
1818: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1820: Fortran Notes:
1821: `idxm` and `idxn` should be declared as
1822: $ MatStencil idxm(4,m),idxn(4,n)
1823: and the values inserted using
1824: .vb
1825: idxm(MatStencil_i,1) = i
1826: idxm(MatStencil_j,1) = j
1827: idxm(MatStencil_k,1) = k
1828: etc
1829: .ve
1831: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1832: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1833: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1834: @*/
1835: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1836: {
1837: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1838: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1839: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1841: PetscFunctionBegin;
1842: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1845: PetscAssertPointer(idxm, 3);
1846: PetscAssertPointer(idxn, 5);
1847: PetscAssertPointer(v, 6);
1849: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1850: jdxm = buf;
1851: jdxn = buf + m;
1852: } else {
1853: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1854: jdxm = bufm;
1855: jdxn = bufn;
1856: }
1857: for (i = 0; i < m; i++) {
1858: for (j = 0; j < 3 - sdim; j++) dxm++;
1859: tmp = *dxm++ - starts[0];
1860: for (j = 0; j < sdim - 1; j++) {
1861: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1862: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1863: }
1864: dxm++;
1865: jdxm[i] = tmp;
1866: }
1867: for (i = 0; i < n; i++) {
1868: for (j = 0; j < 3 - sdim; j++) dxn++;
1869: tmp = *dxn++ - starts[0];
1870: for (j = 0; j < sdim - 1; j++) {
1871: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1872: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1873: }
1874: dxn++;
1875: jdxn[i] = tmp;
1876: }
1877: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1878: PetscCall(PetscFree2(bufm, bufn));
1879: PetscFunctionReturn(PETSC_SUCCESS);
1880: }
1882: /*@
1883: MatSetStencil - Sets the grid information for setting values into a matrix via
1884: `MatSetValuesStencil()`
1886: Not Collective
1888: Input Parameters:
1889: + mat - the matrix
1890: . dim - dimension of the grid 1, 2, or 3
1891: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1892: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1893: - dof - number of degrees of freedom per node
1895: Level: beginner
1897: Notes:
1898: Inspired by the structured grid interface to the HYPRE package
1899: (www.llnl.gov/CASC/hyper)
1901: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1902: user.
1904: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1905: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1906: @*/
1907: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1908: {
1909: PetscFunctionBegin;
1911: PetscAssertPointer(dims, 3);
1912: PetscAssertPointer(starts, 4);
1914: mat->stencil.dim = dim + (dof > 1);
1915: for (PetscInt i = 0; i < dim; i++) {
1916: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1917: mat->stencil.starts[i] = starts[dim - i - 1];
1918: }
1919: mat->stencil.dims[dim] = dof;
1920: mat->stencil.starts[dim] = 0;
1921: mat->stencil.noc = (PetscBool)(dof == 1);
1922: PetscFunctionReturn(PETSC_SUCCESS);
1923: }
1925: /*@C
1926: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1928: Not Collective
1930: Input Parameters:
1931: + mat - the matrix
1932: . v - a logically two-dimensional array of values
1933: . m - the number of block rows
1934: . idxm - the global block indices
1935: . n - the number of block columns
1936: . idxn - the global block indices
1937: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
1939: Level: intermediate
1941: Notes:
1942: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1943: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
1945: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1946: NOT the total number of rows/columns; for example, if the block size is 2 and
1947: you are passing in values for rows 2,3,4,5 then m would be 2 (not 4).
1948: The values in idxm would be 1 2; that is the first index for each block divided by
1949: the block size.
1951: You must call `MatSetBlockSize()` when constructing this matrix (before
1952: preallocating it).
1954: By default the values, `v`, are row-oriented, so the layout of
1955: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
1957: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1958: options cannot be mixed without intervening calls to the assembly
1959: routines.
1961: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
1962: as well as in C.
1964: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1965: simply ignored. This allows easily inserting element stiffness matrices
1966: with homogeneous Dirichlet boundary conditions that you don't want represented
1967: in the matrix.
1969: Each time an entry is set within a sparse matrix via `MatSetValues()`,
1970: internal searching must be done to determine where to place the
1971: data in the matrix storage space. By instead inserting blocks of
1972: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
1973: reduced.
1975: Example:
1976: .vb
1977: Suppose m=n=2 and block size(bs) = 2 The array is
1979: 1 2 | 3 4
1980: 5 6 | 7 8
1981: - - - | - - -
1982: 9 10 | 11 12
1983: 13 14 | 15 16
1985: v[] should be passed in like
1986: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
1988: If you are not using row oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
1989: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
1990: .ve
1992: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
1993: @*/
1994: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1995: {
1996: PetscFunctionBeginHot;
1999: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2000: PetscAssertPointer(idxm, 3);
2001: PetscAssertPointer(idxn, 5);
2002: MatCheckPreallocated(mat, 1);
2003: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2004: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2005: if (PetscDefined(USE_DEBUG)) {
2006: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2007: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2008: }
2009: if (PetscDefined(USE_DEBUG)) {
2010: PetscInt rbs, cbs, M, N, i;
2011: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2012: PetscCall(MatGetSize(mat, &M, &N));
2013: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than row length %" PetscInt_FMT, i, idxm[i], M);
2014: for (i = 0; i < n; i++) PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block index %" PetscInt_FMT " (index %" PetscInt_FMT ") great than column length %" PetscInt_FMT, i, idxn[i], N);
2015: }
2016: if (mat->assembled) {
2017: mat->was_assembled = PETSC_TRUE;
2018: mat->assembled = PETSC_FALSE;
2019: }
2020: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2021: if (mat->ops->setvaluesblocked) {
2022: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2023: } else {
2024: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2025: PetscInt i, j, bs, cbs;
2027: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2028: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2029: iidxm = buf;
2030: iidxn = buf + m * bs;
2031: } else {
2032: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2033: iidxm = bufr;
2034: iidxn = bufc;
2035: }
2036: for (i = 0; i < m; i++) {
2037: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2038: }
2039: if (m != n || bs != cbs || idxm != idxn) {
2040: for (i = 0; i < n; i++) {
2041: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2042: }
2043: } else iidxn = iidxm;
2044: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2045: PetscCall(PetscFree2(bufr, bufc));
2046: }
2047: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2048: PetscFunctionReturn(PETSC_SUCCESS);
2049: }
2051: /*@C
2052: MatGetValues - Gets a block of local values from a matrix.
2054: Not Collective; can only return values that are owned by the give process
2056: Input Parameters:
2057: + mat - the matrix
2058: . v - a logically two-dimensional array for storing the values
2059: . m - the number of rows
2060: . idxm - the global indices of the rows
2061: . n - the number of columns
2062: - idxn - the global indices of the columns
2064: Level: advanced
2066: Notes:
2067: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2068: The values, `v`, are then returned in a row-oriented format,
2069: analogous to that used by default in `MatSetValues()`.
2071: `MatGetValues()` uses 0-based row and column numbers in
2072: Fortran as well as in C.
2074: `MatGetValues()` requires that the matrix has been assembled
2075: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2076: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2077: without intermediate matrix assembly.
2079: Negative row or column indices will be ignored and those locations in `v` will be
2080: left unchanged.
2082: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2083: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2084: from `MatGetOwnershipRange`(mat,&rstart,&rend).
2086: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2087: @*/
2088: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2089: {
2090: PetscFunctionBegin;
2093: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2094: PetscAssertPointer(idxm, 3);
2095: PetscAssertPointer(idxn, 5);
2096: PetscAssertPointer(v, 6);
2097: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2098: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2099: MatCheckPreallocated(mat, 1);
2101: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2102: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2103: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2104: PetscFunctionReturn(PETSC_SUCCESS);
2105: }
2107: /*@C
2108: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2109: defined previously by `MatSetLocalToGlobalMapping()`
2111: Not Collective
2113: Input Parameters:
2114: + mat - the matrix
2115: . nrow - number of rows
2116: . irow - the row local indices
2117: . ncol - number of columns
2118: - icol - the column local indices
2120: Output Parameter:
2121: . y - a logically two-dimensional array of values
2123: Level: advanced
2125: Notes:
2126: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2128: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2129: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2130: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2131: with `MatSetLocalToGlobalMapping()`.
2133: Developer Notes:
2134: This is labelled with C so does not automatically generate Fortran stubs and interfaces
2135: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2137: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2138: `MatSetValuesLocal()`, `MatGetValues()`
2139: @*/
2140: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2141: {
2142: PetscFunctionBeginHot;
2145: MatCheckPreallocated(mat, 1);
2146: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2147: PetscAssertPointer(irow, 3);
2148: PetscAssertPointer(icol, 5);
2149: if (PetscDefined(USE_DEBUG)) {
2150: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2151: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2152: }
2153: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2154: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2155: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2156: else {
2157: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2158: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2159: irowm = buf;
2160: icolm = buf + nrow;
2161: } else {
2162: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2163: irowm = bufr;
2164: icolm = bufc;
2165: }
2166: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2167: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2168: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2169: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2170: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2171: PetscCall(PetscFree2(bufr, bufc));
2172: }
2173: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2174: PetscFunctionReturn(PETSC_SUCCESS);
2175: }
2177: /*@
2178: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2179: the same size. Currently, this can only be called once and creates the given matrix.
2181: Not Collective
2183: Input Parameters:
2184: + mat - the matrix
2185: . nb - the number of blocks
2186: . bs - the number of rows (and columns) in each block
2187: . rows - a concatenation of the rows for each block
2188: - v - a concatenation of logically two-dimensional arrays of values
2190: Level: advanced
2192: Note:
2193: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2195: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2197: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2198: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2199: @*/
2200: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2201: {
2202: PetscFunctionBegin;
2205: PetscAssertPointer(rows, 4);
2206: PetscAssertPointer(v, 5);
2207: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2209: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2210: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2211: else {
2212: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2213: }
2214: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2215: PetscFunctionReturn(PETSC_SUCCESS);
2216: }
2218: /*@
2219: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2220: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2221: using a local (per-processor) numbering.
2223: Not Collective
2225: Input Parameters:
2226: + x - the matrix
2227: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2228: - cmapping - column mapping
2230: Level: intermediate
2232: Note:
2233: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2235: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2236: @*/
2237: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2238: {
2239: PetscFunctionBegin;
2244: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2245: else {
2246: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2247: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2248: }
2249: PetscFunctionReturn(PETSC_SUCCESS);
2250: }
2252: /*@
2253: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2255: Not Collective
2257: Input Parameter:
2258: . A - the matrix
2260: Output Parameters:
2261: + rmapping - row mapping
2262: - cmapping - column mapping
2264: Level: advanced
2266: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2267: @*/
2268: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2269: {
2270: PetscFunctionBegin;
2273: if (rmapping) {
2274: PetscAssertPointer(rmapping, 2);
2275: *rmapping = A->rmap->mapping;
2276: }
2277: if (cmapping) {
2278: PetscAssertPointer(cmapping, 3);
2279: *cmapping = A->cmap->mapping;
2280: }
2281: PetscFunctionReturn(PETSC_SUCCESS);
2282: }
2284: /*@
2285: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2287: Logically Collective
2289: Input Parameters:
2290: + A - the matrix
2291: . rmap - row layout
2292: - cmap - column layout
2294: Level: advanced
2296: Note:
2297: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2299: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2300: @*/
2301: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2302: {
2303: PetscFunctionBegin;
2305: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2306: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2307: PetscFunctionReturn(PETSC_SUCCESS);
2308: }
2310: /*@
2311: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2313: Not Collective
2315: Input Parameter:
2316: . A - the matrix
2318: Output Parameters:
2319: + rmap - row layout
2320: - cmap - column layout
2322: Level: advanced
2324: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2325: @*/
2326: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2327: {
2328: PetscFunctionBegin;
2331: if (rmap) {
2332: PetscAssertPointer(rmap, 2);
2333: *rmap = A->rmap;
2334: }
2335: if (cmap) {
2336: PetscAssertPointer(cmap, 3);
2337: *cmap = A->cmap;
2338: }
2339: PetscFunctionReturn(PETSC_SUCCESS);
2340: }
2342: /*@C
2343: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2344: using a local numbering of the nodes.
2346: Not Collective
2348: Input Parameters:
2349: + mat - the matrix
2350: . nrow - number of rows
2351: . irow - the row local indices
2352: . ncol - number of columns
2353: . icol - the column local indices
2354: . y - a logically two-dimensional array of values
2355: - addv - either `INSERT_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2357: Level: intermediate
2359: Notes:
2360: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call MatXXXXSetPreallocation() or
2361: `MatSetUp()` before using this routine
2363: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2365: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2366: options cannot be mixed without intervening calls to the assembly
2367: routines.
2369: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2370: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2372: Developer Notes:
2373: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2374: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2376: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2377: `MatGetValuesLocal()`
2378: @*/
2379: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2380: {
2381: PetscFunctionBeginHot;
2384: MatCheckPreallocated(mat, 1);
2385: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2386: PetscAssertPointer(irow, 3);
2387: PetscAssertPointer(icol, 5);
2388: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2389: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2390: if (PetscDefined(USE_DEBUG)) {
2391: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2392: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2393: }
2395: if (mat->assembled) {
2396: mat->was_assembled = PETSC_TRUE;
2397: mat->assembled = PETSC_FALSE;
2398: }
2399: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2400: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2401: else {
2402: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2403: const PetscInt *irowm, *icolm;
2405: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2406: bufr = buf;
2407: bufc = buf + nrow;
2408: irowm = bufr;
2409: icolm = bufc;
2410: } else {
2411: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2412: irowm = bufr;
2413: icolm = bufc;
2414: }
2415: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2416: else irowm = irow;
2417: if (mat->cmap->mapping) {
2418: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2419: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2420: } else icolm = irowm;
2421: } else icolm = icol;
2422: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2423: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2424: }
2425: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2426: PetscFunctionReturn(PETSC_SUCCESS);
2427: }
2429: /*@C
2430: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2431: using a local ordering of the nodes a block at a time.
2433: Not Collective
2435: Input Parameters:
2436: + mat - the matrix
2437: . nrow - number of rows
2438: . irow - the row local indices
2439: . ncol - number of columns
2440: . icol - the column local indices
2441: . y - a logically two-dimensional array of values
2442: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2444: Level: intermediate
2446: Notes:
2447: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call MatXXXXSetPreallocation() or
2448: `MatSetUp()` before using this routine
2450: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2451: before using this routineBefore calling `MatSetValuesLocal()`, the user must first set the
2453: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2454: options cannot be mixed without intervening calls to the assembly
2455: routines.
2457: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2458: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2460: Developer Notes:
2461: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2462: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2464: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2465: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2466: @*/
2467: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2468: {
2469: PetscFunctionBeginHot;
2472: MatCheckPreallocated(mat, 1);
2473: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2474: PetscAssertPointer(irow, 3);
2475: PetscAssertPointer(icol, 5);
2476: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2477: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2478: if (PetscDefined(USE_DEBUG)) {
2479: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2480: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2481: }
2483: if (mat->assembled) {
2484: mat->was_assembled = PETSC_TRUE;
2485: mat->assembled = PETSC_FALSE;
2486: }
2487: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2488: PetscInt irbs, rbs;
2489: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2490: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2491: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2492: }
2493: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2494: PetscInt icbs, cbs;
2495: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2496: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2497: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2498: }
2499: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2500: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2501: else {
2502: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2503: const PetscInt *irowm, *icolm;
2505: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2506: bufr = buf;
2507: bufc = buf + nrow;
2508: irowm = bufr;
2509: icolm = bufc;
2510: } else {
2511: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2512: irowm = bufr;
2513: icolm = bufc;
2514: }
2515: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2516: else irowm = irow;
2517: if (mat->cmap->mapping) {
2518: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2519: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2520: } else icolm = irowm;
2521: } else icolm = icol;
2522: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2523: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2524: }
2525: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2526: PetscFunctionReturn(PETSC_SUCCESS);
2527: }
2529: /*@
2530: MatMultDiagonalBlock - Computes the matrix-vector product, y = Dx. Where D is defined by the inode or block structure of the diagonal
2532: Collective
2534: Input Parameters:
2535: + mat - the matrix
2536: - x - the vector to be multiplied
2538: Output Parameter:
2539: . y - the result
2541: Level: developer
2543: Note:
2544: The vectors `x` and `y` cannot be the same. I.e., one cannot
2545: call `MatMultDiagonalBlock`(A,y,y).
2547: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2548: @*/
2549: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2550: {
2551: PetscFunctionBegin;
2557: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2558: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2559: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2560: MatCheckPreallocated(mat, 1);
2562: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2563: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2564: PetscFunctionReturn(PETSC_SUCCESS);
2565: }
2567: /*@
2568: MatMult - Computes the matrix-vector product, y = Ax.
2570: Neighbor-wise Collective
2572: Input Parameters:
2573: + mat - the matrix
2574: - x - the vector to be multiplied
2576: Output Parameter:
2577: . y - the result
2579: Level: beginner
2581: Note:
2582: The vectors `x` and `y` cannot be the same. I.e., one cannot
2583: call `MatMult`(A,y,y).
2585: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2586: @*/
2587: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2588: {
2589: PetscFunctionBegin;
2593: VecCheckAssembled(x);
2595: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2596: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2597: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2598: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2599: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2600: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2601: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2602: PetscCall(VecSetErrorIfLocked(y, 3));
2603: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2604: MatCheckPreallocated(mat, 1);
2606: PetscCall(VecLockReadPush(x));
2607: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2608: PetscUseTypeMethod(mat, mult, x, y);
2609: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2610: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2611: PetscCall(VecLockReadPop(x));
2612: PetscFunctionReturn(PETSC_SUCCESS);
2613: }
2615: /*@
2616: MatMultTranspose - Computes matrix transpose times a vector y = A^T * x.
2618: Neighbor-wise Collective
2620: Input Parameters:
2621: + mat - the matrix
2622: - x - the vector to be multiplied
2624: Output Parameter:
2625: . y - the result
2627: Level: beginner
2629: Notes:
2630: The vectors `x` and `y` cannot be the same. I.e., one cannot
2631: call `MatMultTranspose`(A,y,y).
2633: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiple,
2634: use `MatMultHermitianTranspose()`
2636: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2637: @*/
2638: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2639: {
2640: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2642: PetscFunctionBegin;
2646: VecCheckAssembled(x);
2649: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2650: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2651: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2652: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2653: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2654: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2655: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2656: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2657: MatCheckPreallocated(mat, 1);
2659: if (!mat->ops->multtranspose) {
2660: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2661: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2662: } else op = mat->ops->multtranspose;
2663: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2664: PetscCall(VecLockReadPush(x));
2665: PetscCall((*op)(mat, x, y));
2666: PetscCall(VecLockReadPop(x));
2667: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2668: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2669: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2670: PetscFunctionReturn(PETSC_SUCCESS);
2671: }
2673: /*@
2674: MatMultHermitianTranspose - Computes matrix Hermitian transpose times a vector.
2676: Neighbor-wise Collective
2678: Input Parameters:
2679: + mat - the matrix
2680: - x - the vector to be multiplied
2682: Output Parameter:
2683: . y - the result
2685: Level: beginner
2687: Notes:
2688: The vectors `x` and `y` cannot be the same. I.e., one cannot
2689: call `MatMultHermitianTranspose`(A,y,y).
2691: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2693: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2695: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2696: @*/
2697: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2698: {
2699: PetscFunctionBegin;
2705: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2706: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2707: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2708: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2709: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2710: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2711: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2712: MatCheckPreallocated(mat, 1);
2714: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2715: #if defined(PETSC_USE_COMPLEX)
2716: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2717: PetscCall(VecLockReadPush(x));
2718: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2719: else PetscUseTypeMethod(mat, mult, x, y);
2720: PetscCall(VecLockReadPop(x));
2721: } else {
2722: Vec w;
2723: PetscCall(VecDuplicate(x, &w));
2724: PetscCall(VecCopy(x, w));
2725: PetscCall(VecConjugate(w));
2726: PetscCall(MatMultTranspose(mat, w, y));
2727: PetscCall(VecDestroy(&w));
2728: PetscCall(VecConjugate(y));
2729: }
2730: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2731: #else
2732: PetscCall(MatMultTranspose(mat, x, y));
2733: #endif
2734: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2735: PetscFunctionReturn(PETSC_SUCCESS);
2736: }
2738: /*@
2739: MatMultAdd - Computes v3 = v2 + A * v1.
2741: Neighbor-wise Collective
2743: Input Parameters:
2744: + mat - the matrix
2745: . v1 - the vector to be multiplied by `mat`
2746: - v2 - the vector to be added to the result
2748: Output Parameter:
2749: . v3 - the result
2751: Level: beginner
2753: Note:
2754: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2755: call `MatMultAdd`(A,v1,v2,v1).
2757: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2758: @*/
2759: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2760: {
2761: PetscFunctionBegin;
2768: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2769: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2770: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2771: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2772: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2773: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2774: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2775: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2776: MatCheckPreallocated(mat, 1);
2778: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2779: PetscCall(VecLockReadPush(v1));
2780: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2781: PetscCall(VecLockReadPop(v1));
2782: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2783: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2784: PetscFunctionReturn(PETSC_SUCCESS);
2785: }
2787: /*@
2788: MatMultTransposeAdd - Computes v3 = v2 + A' * v1.
2790: Neighbor-wise Collective
2792: Input Parameters:
2793: + mat - the matrix
2794: . v1 - the vector to be multiplied by the transpose of the matrix
2795: - v2 - the vector to be added to the result
2797: Output Parameter:
2798: . v3 - the result
2800: Level: beginner
2802: Note:
2803: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2804: call `MatMultTransposeAdd`(A,v1,v2,v1).
2806: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2807: @*/
2808: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2809: {
2810: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2812: PetscFunctionBegin;
2819: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2820: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2821: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2822: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2823: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2824: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2825: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2826: MatCheckPreallocated(mat, 1);
2828: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2829: PetscCall(VecLockReadPush(v1));
2830: PetscCall((*op)(mat, v1, v2, v3));
2831: PetscCall(VecLockReadPop(v1));
2832: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2833: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2834: PetscFunctionReturn(PETSC_SUCCESS);
2835: }
2837: /*@
2838: MatMultHermitianTransposeAdd - Computes v3 = v2 + A^H * v1.
2840: Neighbor-wise Collective
2842: Input Parameters:
2843: + mat - the matrix
2844: . v1 - the vector to be multiplied by the Hermitian transpose
2845: - v2 - the vector to be added to the result
2847: Output Parameter:
2848: . v3 - the result
2850: Level: beginner
2852: Note:
2853: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2854: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2856: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2857: @*/
2858: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2859: {
2860: PetscFunctionBegin;
2867: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2868: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2869: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2870: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2871: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2872: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2873: MatCheckPreallocated(mat, 1);
2875: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2876: PetscCall(VecLockReadPush(v1));
2877: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2878: else {
2879: Vec w, z;
2880: PetscCall(VecDuplicate(v1, &w));
2881: PetscCall(VecCopy(v1, w));
2882: PetscCall(VecConjugate(w));
2883: PetscCall(VecDuplicate(v3, &z));
2884: PetscCall(MatMultTranspose(mat, w, z));
2885: PetscCall(VecDestroy(&w));
2886: PetscCall(VecConjugate(z));
2887: if (v2 != v3) {
2888: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2889: } else {
2890: PetscCall(VecAXPY(v3, 1.0, z));
2891: }
2892: PetscCall(VecDestroy(&z));
2893: }
2894: PetscCall(VecLockReadPop(v1));
2895: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2896: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2897: PetscFunctionReturn(PETSC_SUCCESS);
2898: }
2900: /*@C
2901: MatGetFactorType - gets the type of factorization it is
2903: Not Collective
2905: Input Parameter:
2906: . mat - the matrix
2908: Output Parameter:
2909: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC,MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2911: Level: intermediate
2913: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2914: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2915: @*/
2916: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2917: {
2918: PetscFunctionBegin;
2921: PetscAssertPointer(t, 2);
2922: *t = mat->factortype;
2923: PetscFunctionReturn(PETSC_SUCCESS);
2924: }
2926: /*@C
2927: MatSetFactorType - sets the type of factorization it is
2929: Logically Collective
2931: Input Parameters:
2932: + mat - the matrix
2933: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC,MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2935: Level: intermediate
2937: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2938: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2939: @*/
2940: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2941: {
2942: PetscFunctionBegin;
2945: mat->factortype = t;
2946: PetscFunctionReturn(PETSC_SUCCESS);
2947: }
2949: /*@C
2950: MatGetInfo - Returns information about matrix storage (number of
2951: nonzeros, memory, etc.).
2953: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
2955: Input Parameters:
2956: + mat - the matrix
2957: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
2959: Output Parameter:
2960: . info - matrix information context
2962: Options Database Key:
2963: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
2965: Notes:
2966: The `MatInfo` context contains a variety of matrix data, including
2967: number of nonzeros allocated and used, number of mallocs during
2968: matrix assembly, etc. Additional information for factored matrices
2969: is provided (such as the fill ratio, number of mallocs during
2970: factorization, etc.).
2972: Example:
2973: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
2974: data within the MatInfo context. For example,
2975: .vb
2976: MatInfo info;
2977: Mat A;
2978: double mal, nz_a, nz_u;
2980: MatGetInfo(A, MAT_LOCAL, &info);
2981: mal = info.mallocs;
2982: nz_a = info.nz_allocated;
2983: .ve
2985: Fortran users should declare info as a double precision
2986: array of dimension `MAT_INFO_SIZE`, and then extract the parameters
2987: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
2988: a complete list of parameter names.
2989: .vb
2990: double precision info(MAT_INFO_SIZE)
2991: double precision mal, nz_a
2992: Mat A
2993: integer ierr
2995: call MatGetInfo(A, MAT_LOCAL, info, ierr)
2996: mal = info(MAT_INFO_MALLOCS)
2997: nz_a = info(MAT_INFO_NZ_ALLOCATED)
2998: .ve
3000: Level: intermediate
3002: Developer Notes:
3003: The Fortran interface is not autogenerated as the
3004: interface definition cannot be generated correctly [due to `MatInfo` argument]
3006: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3007: @*/
3008: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3009: {
3010: PetscFunctionBegin;
3013: PetscAssertPointer(info, 3);
3014: MatCheckPreallocated(mat, 1);
3015: PetscUseTypeMethod(mat, getinfo, flag, info);
3016: PetscFunctionReturn(PETSC_SUCCESS);
3017: }
3019: /*
3020: This is used by external packages where it is not easy to get the info from the actual
3021: matrix factorization.
3022: */
3023: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3024: {
3025: PetscFunctionBegin;
3026: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3027: PetscFunctionReturn(PETSC_SUCCESS);
3028: }
3030: /*@C
3031: MatLUFactor - Performs in-place LU factorization of matrix.
3033: Collective
3035: Input Parameters:
3036: + mat - the matrix
3037: . row - row permutation
3038: . col - column permutation
3039: - info - options for factorization, includes
3040: .vb
3041: fill - expected fill as ratio of original fill.
3042: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3043: Run with the option -info to determine an optimal value to use
3044: .ve
3046: Level: developer
3048: Notes:
3049: Most users should employ the `KSP` interface for linear solvers
3050: instead of working directly with matrix algebra routines such as this.
3051: See, e.g., `KSPCreate()`.
3053: This changes the state of the matrix to a factored matrix; it cannot be used
3054: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3056: This is really in-place only for dense matrices, the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3057: when not using `KSP`.
3059: Developer Notes:
3060: The Fortran interface is not autogenerated as the
3061: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3063: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3064: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3065: @*/
3066: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3067: {
3068: MatFactorInfo tinfo;
3070: PetscFunctionBegin;
3074: if (info) PetscAssertPointer(info, 4);
3076: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3077: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3078: MatCheckPreallocated(mat, 1);
3079: if (!info) {
3080: PetscCall(MatFactorInfoInitialize(&tinfo));
3081: info = &tinfo;
3082: }
3084: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3085: PetscUseTypeMethod(mat, lufactor, row, col, info);
3086: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3087: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3088: PetscFunctionReturn(PETSC_SUCCESS);
3089: }
3091: /*@C
3092: MatILUFactor - Performs in-place ILU factorization of matrix.
3094: Collective
3096: Input Parameters:
3097: + mat - the matrix
3098: . row - row permutation
3099: . col - column permutation
3100: - info - structure containing
3101: .vb
3102: levels - number of levels of fill.
3103: expected fill - as ratio of original fill.
3104: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3105: missing diagonal entries)
3106: .ve
3108: Level: developer
3110: Notes:
3111: Most users should employ the `KSP` interface for linear solvers
3112: instead of working directly with matrix algebra routines such as this.
3113: See, e.g., `KSPCreate()`.
3115: Probably really in-place only when level of fill is zero, otherwise allocates
3116: new space to store factored matrix and deletes previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3117: when not using `KSP`.
3119: Developer Notes:
3120: The Fortran interface is not autogenerated as the
3121: interface definition cannot be generated correctly [due to MatFactorInfo]
3123: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3124: @*/
3125: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3126: {
3127: PetscFunctionBegin;
3131: PetscAssertPointer(info, 4);
3133: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3134: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3135: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3136: MatCheckPreallocated(mat, 1);
3138: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3139: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3140: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3141: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3142: PetscFunctionReturn(PETSC_SUCCESS);
3143: }
3145: /*@C
3146: MatLUFactorSymbolic - Performs symbolic LU factorization of matrix.
3147: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3149: Collective
3151: Input Parameters:
3152: + fact - the factor matrix obtained with `MatGetFactor()`
3153: . mat - the matrix
3154: . row - the row permutation
3155: . col - the column permutation
3156: - info - options for factorization, includes
3157: .vb
3158: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3159: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3160: .ve
3162: Level: developer
3164: Notes:
3165: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3167: Most users should employ the simplified `KSP` interface for linear solvers
3168: instead of working directly with matrix algebra routines such as this.
3169: See, e.g., `KSPCreate()`.
3171: Developer Notes:
3172: The Fortran interface is not autogenerated as the
3173: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3175: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3176: @*/
3177: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3178: {
3179: MatFactorInfo tinfo;
3181: PetscFunctionBegin;
3186: if (info) PetscAssertPointer(info, 5);
3189: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3190: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3191: MatCheckPreallocated(mat, 2);
3192: if (!info) {
3193: PetscCall(MatFactorInfoInitialize(&tinfo));
3194: info = &tinfo;
3195: }
3197: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3198: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3199: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3200: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3201: PetscFunctionReturn(PETSC_SUCCESS);
3202: }
3204: /*@C
3205: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3206: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3208: Collective
3210: Input Parameters:
3211: + fact - the factor matrix obtained with `MatGetFactor()`
3212: . mat - the matrix
3213: - info - options for factorization
3215: Level: developer
3217: Notes:
3218: See `MatLUFactor()` for in-place factorization. See
3219: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3221: Most users should employ the `KSP` interface for linear solvers
3222: instead of working directly with matrix algebra routines such as this.
3223: See, e.g., `KSPCreate()`.
3225: Developer Notes:
3226: The Fortran interface is not autogenerated as the
3227: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3229: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3230: @*/
3231: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3232: {
3233: MatFactorInfo tinfo;
3235: PetscFunctionBegin;
3240: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3241: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3242: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3244: MatCheckPreallocated(mat, 2);
3245: if (!info) {
3246: PetscCall(MatFactorInfoInitialize(&tinfo));
3247: info = &tinfo;
3248: }
3250: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3251: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3252: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3253: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3254: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3255: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3256: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3257: PetscFunctionReturn(PETSC_SUCCESS);
3258: }
3260: /*@C
3261: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3262: symmetric matrix.
3264: Collective
3266: Input Parameters:
3267: + mat - the matrix
3268: . perm - row and column permutations
3269: - info - expected fill as ratio of original fill
3271: Level: developer
3273: Notes:
3274: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3275: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3277: Most users should employ the `KSP` interface for linear solvers
3278: instead of working directly with matrix algebra routines such as this.
3279: See, e.g., `KSPCreate()`.
3281: Developer Notes:
3282: The Fortran interface is not autogenerated as the
3283: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3285: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`
3286: `MatGetOrdering()`
3287: @*/
3288: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3289: {
3290: MatFactorInfo tinfo;
3292: PetscFunctionBegin;
3295: if (info) PetscAssertPointer(info, 3);
3297: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3298: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3299: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3300: MatCheckPreallocated(mat, 1);
3301: if (!info) {
3302: PetscCall(MatFactorInfoInitialize(&tinfo));
3303: info = &tinfo;
3304: }
3306: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3307: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3308: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3309: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3310: PetscFunctionReturn(PETSC_SUCCESS);
3311: }
3313: /*@C
3314: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3315: of a symmetric matrix.
3317: Collective
3319: Input Parameters:
3320: + fact - the factor matrix obtained with `MatGetFactor()`
3321: . mat - the matrix
3322: . perm - row and column permutations
3323: - info - options for factorization, includes
3324: .vb
3325: fill - expected fill as ratio of original fill.
3326: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3327: Run with the option -info to determine an optimal value to use
3328: .ve
3330: Level: developer
3332: Notes:
3333: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3334: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3336: Most users should employ the `KSP` interface for linear solvers
3337: instead of working directly with matrix algebra routines such as this.
3338: See, e.g., `KSPCreate()`.
3340: Developer Notes:
3341: The Fortran interface is not autogenerated as the
3342: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3344: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`
3345: `MatGetOrdering()`
3346: @*/
3347: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3348: {
3349: MatFactorInfo tinfo;
3351: PetscFunctionBegin;
3355: if (info) PetscAssertPointer(info, 4);
3358: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3359: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3360: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3361: MatCheckPreallocated(mat, 2);
3362: if (!info) {
3363: PetscCall(MatFactorInfoInitialize(&tinfo));
3364: info = &tinfo;
3365: }
3367: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3368: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3369: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3370: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3371: PetscFunctionReturn(PETSC_SUCCESS);
3372: }
3374: /*@C
3375: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3376: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3377: `MatCholeskyFactorSymbolic()`.
3379: Collective
3381: Input Parameters:
3382: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3383: . mat - the initial matrix that is to be factored
3384: - info - options for factorization
3386: Level: developer
3388: Note:
3389: Most users should employ the `KSP` interface for linear solvers
3390: instead of working directly with matrix algebra routines such as this.
3391: See, e.g., `KSPCreate()`.
3393: Developer Notes:
3394: The Fortran interface is not autogenerated as the
3395: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3397: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3398: @*/
3399: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3400: {
3401: MatFactorInfo tinfo;
3403: PetscFunctionBegin;
3408: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3409: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3410: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3411: MatCheckPreallocated(mat, 2);
3412: if (!info) {
3413: PetscCall(MatFactorInfoInitialize(&tinfo));
3414: info = &tinfo;
3415: }
3417: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3418: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3419: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3420: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3421: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3422: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3423: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3424: PetscFunctionReturn(PETSC_SUCCESS);
3425: }
3427: /*@
3428: MatQRFactor - Performs in-place QR factorization of matrix.
3430: Collective
3432: Input Parameters:
3433: + mat - the matrix
3434: . col - column permutation
3435: - info - options for factorization, includes
3436: .vb
3437: fill - expected fill as ratio of original fill.
3438: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3439: Run with the option -info to determine an optimal value to use
3440: .ve
3442: Level: developer
3444: Notes:
3445: Most users should employ the `KSP` interface for linear solvers
3446: instead of working directly with matrix algebra routines such as this.
3447: See, e.g., `KSPCreate()`.
3449: This changes the state of the matrix to a factored matrix; it cannot be used
3450: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3452: Developer Notes:
3453: The Fortran interface is not autogenerated as the
3454: interface definition cannot be generated correctly [due to MatFactorInfo]
3456: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3457: `MatSetUnfactored()`
3458: @*/
3459: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3460: {
3461: PetscFunctionBegin;
3464: if (info) PetscAssertPointer(info, 3);
3466: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3467: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3468: MatCheckPreallocated(mat, 1);
3469: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3470: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3471: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3472: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3473: PetscFunctionReturn(PETSC_SUCCESS);
3474: }
3476: /*@
3477: MatQRFactorSymbolic - Performs symbolic QR factorization of matrix.
3478: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3480: Collective
3482: Input Parameters:
3483: + fact - the factor matrix obtained with `MatGetFactor()`
3484: . mat - the matrix
3485: . col - column permutation
3486: - info - options for factorization, includes
3487: .vb
3488: fill - expected fill as ratio of original fill.
3489: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3490: Run with the option -info to determine an optimal value to use
3491: .ve
3493: Level: developer
3495: Note:
3496: Most users should employ the `KSP` interface for linear solvers
3497: instead of working directly with matrix algebra routines such as this.
3498: See, e.g., `KSPCreate()`.
3500: Developer Notes:
3501: The Fortran interface is not autogenerated as the
3502: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3504: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3505: @*/
3506: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3507: {
3508: MatFactorInfo tinfo;
3510: PetscFunctionBegin;
3514: if (info) PetscAssertPointer(info, 4);
3517: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3518: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3519: MatCheckPreallocated(mat, 2);
3520: if (!info) {
3521: PetscCall(MatFactorInfoInitialize(&tinfo));
3522: info = &tinfo;
3523: }
3525: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3526: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3527: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3528: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3529: PetscFunctionReturn(PETSC_SUCCESS);
3530: }
3532: /*@
3533: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3534: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3536: Collective
3538: Input Parameters:
3539: + fact - the factor matrix obtained with `MatGetFactor()`
3540: . mat - the matrix
3541: - info - options for factorization
3543: Level: developer
3545: Notes:
3546: See `MatQRFactor()` for in-place factorization.
3548: Most users should employ the `KSP` interface for linear solvers
3549: instead of working directly with matrix algebra routines such as this.
3550: See, e.g., `KSPCreate()`.
3552: Developer Notes:
3553: The Fortran interface is not autogenerated as the
3554: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3556: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3557: @*/
3558: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3559: {
3560: MatFactorInfo tinfo;
3562: PetscFunctionBegin;
3567: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3568: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3569: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3571: MatCheckPreallocated(mat, 2);
3572: if (!info) {
3573: PetscCall(MatFactorInfoInitialize(&tinfo));
3574: info = &tinfo;
3575: }
3577: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3578: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3579: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3580: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3581: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3582: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3583: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3584: PetscFunctionReturn(PETSC_SUCCESS);
3585: }
3587: /*@
3588: MatSolve - Solves A x = b, given a factored matrix.
3590: Neighbor-wise Collective
3592: Input Parameters:
3593: + mat - the factored matrix
3594: - b - the right-hand-side vector
3596: Output Parameter:
3597: . x - the result vector
3599: Level: developer
3601: Notes:
3602: The vectors `b` and `x` cannot be the same. I.e., one cannot
3603: call `MatSolve`(A,x,x).
3605: Most users should employ the `KSP` interface for linear solvers
3606: instead of working directly with matrix algebra routines such as this.
3607: See, e.g., `KSPCreate()`.
3609: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3610: @*/
3611: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3612: {
3613: PetscFunctionBegin;
3618: PetscCheckSameComm(mat, 1, b, 2);
3619: PetscCheckSameComm(mat, 1, x, 3);
3620: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3621: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3622: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3623: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3624: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3625: MatCheckPreallocated(mat, 1);
3627: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3628: if (mat->factorerrortype) {
3629: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3630: PetscCall(VecSetInf(x));
3631: } else PetscUseTypeMethod(mat, solve, b, x);
3632: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3633: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3634: PetscFunctionReturn(PETSC_SUCCESS);
3635: }
3637: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3638: {
3639: Vec b, x;
3640: PetscInt N, i;
3641: PetscErrorCode (*f)(Mat, Vec, Vec);
3642: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3644: PetscFunctionBegin;
3645: if (A->factorerrortype) {
3646: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3647: PetscCall(MatSetInf(X));
3648: PetscFunctionReturn(PETSC_SUCCESS);
3649: }
3650: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3651: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3652: PetscCall(MatBoundToCPU(A, &Abound));
3653: if (!Abound) {
3654: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3655: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3656: }
3657: #if PetscDefined(HAVE_CUDA)
3658: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3659: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3660: #elif PetscDefined(HAVE_HIP)
3661: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3662: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3663: #endif
3664: PetscCall(MatGetSize(B, NULL, &N));
3665: for (i = 0; i < N; i++) {
3666: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3667: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3668: PetscCall((*f)(A, b, x));
3669: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3670: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3671: }
3672: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3673: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3674: PetscFunctionReturn(PETSC_SUCCESS);
3675: }
3677: /*@
3678: MatMatSolve - Solves A X = B, given a factored matrix.
3680: Neighbor-wise Collective
3682: Input Parameters:
3683: + A - the factored matrix
3684: - B - the right-hand-side matrix `MATDENSE` (or sparse `MATAIJ`-- when using MUMPS)
3686: Output Parameter:
3687: . X - the result matrix (dense matrix)
3689: Level: developer
3691: Note:
3692: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3693: otherwise, `B` and `X` cannot be the same.
3695: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3696: @*/
3697: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3698: {
3699: PetscFunctionBegin;
3704: PetscCheckSameComm(A, 1, B, 2);
3705: PetscCheckSameComm(A, 1, X, 3);
3706: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3707: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3708: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3709: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3710: MatCheckPreallocated(A, 1);
3712: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3713: if (!A->ops->matsolve) {
3714: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3715: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3716: } else PetscUseTypeMethod(A, matsolve, B, X);
3717: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3718: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3719: PetscFunctionReturn(PETSC_SUCCESS);
3720: }
3722: /*@
3723: MatMatSolveTranspose - Solves A^T X = B, given a factored matrix.
3725: Neighbor-wise Collective
3727: Input Parameters:
3728: + A - the factored matrix
3729: - B - the right-hand-side matrix (`MATDENSE` matrix)
3731: Output Parameter:
3732: . X - the result matrix (dense matrix)
3734: Level: developer
3736: Note:
3737: The matrices `B` and `X` cannot be the same. I.e., one cannot
3738: call `MatMatSolveTranspose`(A,X,X).
3740: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3741: @*/
3742: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3743: {
3744: PetscFunctionBegin;
3749: PetscCheckSameComm(A, 1, B, 2);
3750: PetscCheckSameComm(A, 1, X, 3);
3751: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3752: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3753: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3754: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3755: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3756: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3757: MatCheckPreallocated(A, 1);
3759: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3760: if (!A->ops->matsolvetranspose) {
3761: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3762: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3763: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3764: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3765: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3766: PetscFunctionReturn(PETSC_SUCCESS);
3767: }
3769: /*@
3770: MatMatTransposeSolve - Solves A X = B^T, given a factored matrix.
3772: Neighbor-wise Collective
3774: Input Parameters:
3775: + A - the factored matrix
3776: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3778: Output Parameter:
3779: . X - the result matrix (dense matrix)
3781: Level: developer
3783: Note:
3784: For MUMPS, it only supports centralized sparse compressed column format on the host processor for right hand side matrix. User must create B^T in sparse compressed row
3785: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3787: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3788: @*/
3789: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3790: {
3791: PetscFunctionBegin;
3796: PetscCheckSameComm(A, 1, Bt, 2);
3797: PetscCheckSameComm(A, 1, X, 3);
3799: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3800: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3801: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3802: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3803: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3804: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3805: MatCheckPreallocated(A, 1);
3807: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3808: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3809: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3810: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3811: PetscFunctionReturn(PETSC_SUCCESS);
3812: }
3814: /*@
3815: MatForwardSolve - Solves L x = b, given a factored matrix, A = LU, or
3816: U^T*D^(1/2) x = b, given a factored symmetric matrix, A = U^T*D*U,
3818: Neighbor-wise Collective
3820: Input Parameters:
3821: + mat - the factored matrix
3822: - b - the right-hand-side vector
3824: Output Parameter:
3825: . x - the result vector
3827: Level: developer
3829: Notes:
3830: `MatSolve()` should be used for most applications, as it performs
3831: a forward solve followed by a backward solve.
3833: The vectors `b` and `x` cannot be the same, i.e., one cannot
3834: call `MatForwardSolve`(A,x,x).
3836: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3837: the diagonal blocks are not implemented as D = D^(1/2) * D^(1/2) yet.
3838: `MatForwardSolve()` solves U^T*D y = b, and
3839: `MatBackwardSolve()` solves U x = y.
3840: Thus they do not provide a symmetric preconditioner.
3842: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3843: @*/
3844: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3845: {
3846: PetscFunctionBegin;
3851: PetscCheckSameComm(mat, 1, b, 2);
3852: PetscCheckSameComm(mat, 1, x, 3);
3853: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3854: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3855: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3856: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3857: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3858: MatCheckPreallocated(mat, 1);
3860: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3861: PetscUseTypeMethod(mat, forwardsolve, b, x);
3862: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3863: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3864: PetscFunctionReturn(PETSC_SUCCESS);
3865: }
3867: /*@
3868: MatBackwardSolve - Solves U x = b, given a factored matrix, A = LU.
3869: D^(1/2) U x = b, given a factored symmetric matrix, A = U^T*D*U,
3871: Neighbor-wise Collective
3873: Input Parameters:
3874: + mat - the factored matrix
3875: - b - the right-hand-side vector
3877: Output Parameter:
3878: . x - the result vector
3880: Level: developer
3882: Notes:
3883: `MatSolve()` should be used for most applications, as it performs
3884: a forward solve followed by a backward solve.
3886: The vectors `b` and `x` cannot be the same. I.e., one cannot
3887: call `MatBackwardSolve`(A,x,x).
3889: For matrix in `MATSEQBAIJ` format with block size larger than 1,
3890: the diagonal blocks are not implemented as D = D^(1/2) * D^(1/2) yet.
3891: `MatForwardSolve()` solves U^T*D y = b, and
3892: `MatBackwardSolve()` solves U x = y.
3893: Thus they do not provide a symmetric preconditioner.
3895: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3896: @*/
3897: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3898: {
3899: PetscFunctionBegin;
3904: PetscCheckSameComm(mat, 1, b, 2);
3905: PetscCheckSameComm(mat, 1, x, 3);
3906: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3907: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3908: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3909: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3910: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3911: MatCheckPreallocated(mat, 1);
3913: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3914: PetscUseTypeMethod(mat, backwardsolve, b, x);
3915: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3916: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3917: PetscFunctionReturn(PETSC_SUCCESS);
3918: }
3920: /*@
3921: MatSolveAdd - Computes x = y + inv(A)*b, given a factored matrix.
3923: Neighbor-wise Collective
3925: Input Parameters:
3926: + mat - the factored matrix
3927: . b - the right-hand-side vector
3928: - y - the vector to be added to
3930: Output Parameter:
3931: . x - the result vector
3933: Level: developer
3935: Note:
3936: The vectors `b` and `x` cannot be the same. I.e., one cannot
3937: call `MatSolveAdd`(A,x,y,x).
3939: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3940: @*/
3941: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3942: {
3943: PetscScalar one = 1.0;
3944: Vec tmp;
3946: PetscFunctionBegin;
3952: PetscCheckSameComm(mat, 1, b, 2);
3953: PetscCheckSameComm(mat, 1, y, 3);
3954: PetscCheckSameComm(mat, 1, x, 4);
3955: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3956: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3957: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3958: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
3959: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3960: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
3961: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3962: MatCheckPreallocated(mat, 1);
3964: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
3965: if (mat->factorerrortype) {
3966: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3967: PetscCall(VecSetInf(x));
3968: } else if (mat->ops->solveadd) {
3969: PetscUseTypeMethod(mat, solveadd, b, y, x);
3970: } else {
3971: /* do the solve then the add manually */
3972: if (x != y) {
3973: PetscCall(MatSolve(mat, b, x));
3974: PetscCall(VecAXPY(x, one, y));
3975: } else {
3976: PetscCall(VecDuplicate(x, &tmp));
3977: PetscCall(VecCopy(x, tmp));
3978: PetscCall(MatSolve(mat, b, x));
3979: PetscCall(VecAXPY(x, one, tmp));
3980: PetscCall(VecDestroy(&tmp));
3981: }
3982: }
3983: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
3984: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3985: PetscFunctionReturn(PETSC_SUCCESS);
3986: }
3988: /*@
3989: MatSolveTranspose - Solves A' x = b, given a factored matrix.
3991: Neighbor-wise Collective
3993: Input Parameters:
3994: + mat - the factored matrix
3995: - b - the right-hand-side vector
3997: Output Parameter:
3998: . x - the result vector
4000: Level: developer
4002: Notes:
4003: The vectors `b` and `x` cannot be the same. I.e., one cannot
4004: call `MatSolveTranspose`(A,x,x).
4006: Most users should employ the `KSP` interface for linear solvers
4007: instead of working directly with matrix algebra routines such as this.
4008: See, e.g., `KSPCreate()`.
4010: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4011: @*/
4012: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4013: {
4014: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4016: PetscFunctionBegin;
4021: PetscCheckSameComm(mat, 1, b, 2);
4022: PetscCheckSameComm(mat, 1, x, 3);
4023: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4024: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4025: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4026: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4027: MatCheckPreallocated(mat, 1);
4028: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4029: if (mat->factorerrortype) {
4030: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4031: PetscCall(VecSetInf(x));
4032: } else {
4033: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4034: PetscCall((*f)(mat, b, x));
4035: }
4036: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4037: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4038: PetscFunctionReturn(PETSC_SUCCESS);
4039: }
4041: /*@
4042: MatSolveTransposeAdd - Computes x = y + inv(Transpose(A)) b, given a
4043: factored matrix.
4045: Neighbor-wise Collective
4047: Input Parameters:
4048: + mat - the factored matrix
4049: . b - the right-hand-side vector
4050: - y - the vector to be added to
4052: Output Parameter:
4053: . x - the result vector
4055: Level: developer
4057: Note:
4058: The vectors `b` and `x` cannot be the same. I.e., one cannot
4059: call `MatSolveTransposeAdd`(A,x,y,x).
4061: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4062: @*/
4063: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4064: {
4065: PetscScalar one = 1.0;
4066: Vec tmp;
4067: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4069: PetscFunctionBegin;
4075: PetscCheckSameComm(mat, 1, b, 2);
4076: PetscCheckSameComm(mat, 1, y, 3);
4077: PetscCheckSameComm(mat, 1, x, 4);
4078: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4079: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4080: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4081: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4082: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4083: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4084: MatCheckPreallocated(mat, 1);
4086: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4087: if (mat->factorerrortype) {
4088: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4089: PetscCall(VecSetInf(x));
4090: } else if (f) {
4091: PetscCall((*f)(mat, b, y, x));
4092: } else {
4093: /* do the solve then the add manually */
4094: if (x != y) {
4095: PetscCall(MatSolveTranspose(mat, b, x));
4096: PetscCall(VecAXPY(x, one, y));
4097: } else {
4098: PetscCall(VecDuplicate(x, &tmp));
4099: PetscCall(VecCopy(x, tmp));
4100: PetscCall(MatSolveTranspose(mat, b, x));
4101: PetscCall(VecAXPY(x, one, tmp));
4102: PetscCall(VecDestroy(&tmp));
4103: }
4104: }
4105: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4106: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4107: PetscFunctionReturn(PETSC_SUCCESS);
4108: }
4110: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4111: /*@
4112: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4114: Neighbor-wise Collective
4116: Input Parameters:
4117: + mat - the matrix
4118: . b - the right hand side
4119: . omega - the relaxation factor
4120: . flag - flag indicating the type of SOR (see below)
4121: . shift - diagonal shift
4122: . its - the number of iterations
4123: - lits - the number of local iterations
4125: Output Parameter:
4126: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4128: SOR Flags:
4129: + `SOR_FORWARD_SWEEP` - forward SOR
4130: . `SOR_BACKWARD_SWEEP` - backward SOR
4131: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4132: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4133: . `SOR_LOCAL_BACKWARD_SWEEP` - local forward SOR
4134: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4135: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4136: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4137: upper/lower triangular part of matrix to
4138: vector (with omega)
4139: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4141: Level: developer
4143: Notes:
4144: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4145: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4146: on each processor.
4148: Application programmers will not generally use `MatSOR()` directly,
4149: but instead will employ the `KSP`/`PC` interface.
4151: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4153: Most users should employ the `KSP` interface for linear solvers
4154: instead of working directly with matrix algebra routines such as this.
4155: See, e.g., `KSPCreate()`.
4157: Vectors `x` and `b` CANNOT be the same
4159: The flags are implemented as bitwise inclusive or operations.
4160: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4161: to specify a zero initial guess for SSOR.
4163: Developer Notes:
4164: We should add block SOR support for `MATAIJ` matrices with block size set to great than one and no inodes
4166: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4167: @*/
4168: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4169: {
4170: PetscFunctionBegin;
4175: PetscCheckSameComm(mat, 1, b, 2);
4176: PetscCheckSameComm(mat, 1, x, 8);
4177: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4178: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4179: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4180: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4181: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4182: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4183: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4184: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4186: MatCheckPreallocated(mat, 1);
4187: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4188: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4189: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4190: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4191: PetscFunctionReturn(PETSC_SUCCESS);
4192: }
4194: /*
4195: Default matrix copy routine.
4196: */
4197: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4198: {
4199: PetscInt i, rstart = 0, rend = 0, nz;
4200: const PetscInt *cwork;
4201: const PetscScalar *vwork;
4203: PetscFunctionBegin;
4204: if (B->assembled) PetscCall(MatZeroEntries(B));
4205: if (str == SAME_NONZERO_PATTERN) {
4206: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4207: for (i = rstart; i < rend; i++) {
4208: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4209: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4210: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4211: }
4212: } else {
4213: PetscCall(MatAYPX(B, 0.0, A, str));
4214: }
4215: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4216: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4217: PetscFunctionReturn(PETSC_SUCCESS);
4218: }
4220: /*@
4221: MatCopy - Copies a matrix to another matrix.
4223: Collective
4225: Input Parameters:
4226: + A - the matrix
4227: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4229: Output Parameter:
4230: . B - where the copy is put
4232: Level: intermediate
4234: Notes:
4235: If you use `SAME_NONZERO_PATTERN` then the two matrices must have the same nonzero pattern or the routine will crash.
4237: `MatCopy()` copies the matrix entries of a matrix to another existing
4238: matrix (after first zeroing the second matrix). A related routine is
4239: `MatConvert()`, which first creates a new matrix and then copies the data.
4241: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4242: @*/
4243: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4244: {
4245: PetscInt i;
4247: PetscFunctionBegin;
4252: PetscCheckSameComm(A, 1, B, 2);
4253: MatCheckPreallocated(B, 2);
4254: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4255: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4256: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4257: A->cmap->N, B->cmap->N);
4258: MatCheckPreallocated(A, 1);
4259: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4261: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4262: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4263: else PetscCall(MatCopy_Basic(A, B, str));
4265: B->stencil.dim = A->stencil.dim;
4266: B->stencil.noc = A->stencil.noc;
4267: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4268: B->stencil.dims[i] = A->stencil.dims[i];
4269: B->stencil.starts[i] = A->stencil.starts[i];
4270: }
4272: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4273: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4274: PetscFunctionReturn(PETSC_SUCCESS);
4275: }
4277: /*@C
4278: MatConvert - Converts a matrix to another matrix, either of the same
4279: or different type.
4281: Collective
4283: Input Parameters:
4284: + mat - the matrix
4285: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4286: same type as the original matrix.
4287: - reuse - denotes if the destination matrix is to be created or reused.
4288: Use `MAT_INPLACE_MATRIX` for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use
4289: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (can only be used after the first call was made with `MAT_INITIAL_MATRIX`, causes the matrix space in M to be reused).
4291: Output Parameter:
4292: . M - pointer to place new matrix
4294: Level: intermediate
4296: Notes:
4297: `MatConvert()` first creates a new matrix and then copies the data from
4298: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4299: entries of one matrix to another already existing matrix context.
4301: Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
4302: the MPI communicator of the generated matrix is always the same as the communicator
4303: of the input matrix.
4305: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4306: @*/
4307: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4308: {
4309: PetscBool sametype, issame, flg;
4310: PetscBool3 issymmetric, ishermitian;
4311: char convname[256], mtype[256];
4312: Mat B;
4314: PetscFunctionBegin;
4317: PetscAssertPointer(M, 4);
4318: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4319: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4320: MatCheckPreallocated(mat, 1);
4322: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4323: if (flg) newtype = mtype;
4325: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4326: PetscCall(PetscStrcmp(newtype, "same", &issame));
4327: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4328: PetscCheck(!(reuse == MAT_REUSE_MATRIX) || !(mat == *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4330: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4331: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4332: PetscFunctionReturn(PETSC_SUCCESS);
4333: }
4335: /* Cache Mat options because some converters use MatHeaderReplace */
4336: issymmetric = mat->symmetric;
4337: ishermitian = mat->hermitian;
4339: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4340: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4341: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4342: } else {
4343: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4344: const char *prefix[3] = {"seq", "mpi", ""};
4345: PetscInt i;
4346: /*
4347: Order of precedence:
4348: 0) See if newtype is a superclass of the current matrix.
4349: 1) See if a specialized converter is known to the current matrix.
4350: 2) See if a specialized converter is known to the desired matrix class.
4351: 3) See if a good general converter is registered for the desired class
4352: (as of 6/27/03 only MATMPIADJ falls into this category).
4353: 4) See if a good general converter is known for the current matrix.
4354: 5) Use a really basic converter.
4355: */
4357: /* 0) See if newtype is a superclass of the current matrix.
4358: i.e mat is mpiaij and newtype is aij */
4359: for (i = 0; i < 2; i++) {
4360: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4361: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4362: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4363: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4364: if (flg) {
4365: if (reuse == MAT_INPLACE_MATRIX) {
4366: PetscCall(PetscInfo(mat, "Early return\n"));
4367: PetscFunctionReturn(PETSC_SUCCESS);
4368: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4369: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4370: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4371: PetscFunctionReturn(PETSC_SUCCESS);
4372: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4373: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4374: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4375: PetscFunctionReturn(PETSC_SUCCESS);
4376: }
4377: }
4378: }
4379: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4380: for (i = 0; i < 3; i++) {
4381: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4382: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4383: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4384: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4385: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4386: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4387: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4388: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4389: if (conv) goto foundconv;
4390: }
4392: /* 2) See if a specialized converter is known to the desired matrix class. */
4393: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4394: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4395: PetscCall(MatSetType(B, newtype));
4396: for (i = 0; i < 3; i++) {
4397: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4398: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4399: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4400: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4401: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4402: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4403: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4404: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4405: if (conv) {
4406: PetscCall(MatDestroy(&B));
4407: goto foundconv;
4408: }
4409: }
4411: /* 3) See if a good general converter is registered for the desired class */
4412: conv = B->ops->convertfrom;
4413: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4414: PetscCall(MatDestroy(&B));
4415: if (conv) goto foundconv;
4417: /* 4) See if a good general converter is known for the current matrix */
4418: if (mat->ops->convert) conv = mat->ops->convert;
4419: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4420: if (conv) goto foundconv;
4422: /* 5) Use a really basic converter. */
4423: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4424: conv = MatConvert_Basic;
4426: foundconv:
4427: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4428: PetscCall((*conv)(mat, newtype, reuse, M));
4429: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4430: /* the block sizes must be same if the mappings are copied over */
4431: (*M)->rmap->bs = mat->rmap->bs;
4432: (*M)->cmap->bs = mat->cmap->bs;
4433: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4434: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4435: (*M)->rmap->mapping = mat->rmap->mapping;
4436: (*M)->cmap->mapping = mat->cmap->mapping;
4437: }
4438: (*M)->stencil.dim = mat->stencil.dim;
4439: (*M)->stencil.noc = mat->stencil.noc;
4440: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4441: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4442: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4443: }
4444: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4445: }
4446: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4448: /* Copy Mat options */
4449: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4450: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4451: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4452: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4453: PetscFunctionReturn(PETSC_SUCCESS);
4454: }
4456: /*@C
4457: MatFactorGetSolverType - Returns name of the package providing the factorization routines
4459: Not Collective
4461: Input Parameter:
4462: . mat - the matrix, must be a factored matrix
4464: Output Parameter:
4465: . type - the string name of the package (do not free this string)
4467: Level: intermediate
4469: Fortran Notes:
4470: Pass in an empty string and the package name will be copied into it. Make sure the string is long enough.
4472: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4473: @*/
4474: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4475: {
4476: PetscErrorCode (*conv)(Mat, MatSolverType *);
4478: PetscFunctionBegin;
4481: PetscAssertPointer(type, 2);
4482: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4483: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4484: if (conv) PetscCall((*conv)(mat, type));
4485: else *type = MATSOLVERPETSC;
4486: PetscFunctionReturn(PETSC_SUCCESS);
4487: }
4489: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4490: struct _MatSolverTypeForSpecifcType {
4491: MatType mtype;
4492: /* no entry for MAT_FACTOR_NONE */
4493: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4494: MatSolverTypeForSpecifcType next;
4495: };
4497: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4498: struct _MatSolverTypeHolder {
4499: char *name;
4500: MatSolverTypeForSpecifcType handlers;
4501: MatSolverTypeHolder next;
4502: };
4504: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4506: /*@C
4507: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4509: Input Parameters:
4510: + package - name of the package, for example petsc or superlu
4511: . mtype - the matrix type that works with this package
4512: . ftype - the type of factorization supported by the package
4513: - createfactor - routine that will create the factored matrix ready to be used
4515: Level: developer
4517: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`
4518: @*/
4519: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4520: {
4521: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4522: PetscBool flg;
4523: MatSolverTypeForSpecifcType inext, iprev = NULL;
4525: PetscFunctionBegin;
4526: PetscCall(MatInitializePackage());
4527: if (!next) {
4528: PetscCall(PetscNew(&MatSolverTypeHolders));
4529: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4530: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4531: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4532: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4533: PetscFunctionReturn(PETSC_SUCCESS);
4534: }
4535: while (next) {
4536: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4537: if (flg) {
4538: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4539: inext = next->handlers;
4540: while (inext) {
4541: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4542: if (flg) {
4543: inext->createfactor[(int)ftype - 1] = createfactor;
4544: PetscFunctionReturn(PETSC_SUCCESS);
4545: }
4546: iprev = inext;
4547: inext = inext->next;
4548: }
4549: PetscCall(PetscNew(&iprev->next));
4550: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4551: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4552: PetscFunctionReturn(PETSC_SUCCESS);
4553: }
4554: prev = next;
4555: next = next->next;
4556: }
4557: PetscCall(PetscNew(&prev->next));
4558: PetscCall(PetscStrallocpy(package, &prev->next->name));
4559: PetscCall(PetscNew(&prev->next->handlers));
4560: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4561: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4562: PetscFunctionReturn(PETSC_SUCCESS);
4563: }
4565: /*@C
4566: MatSolverTypeGet - Gets the function that creates the factor matrix if it exist
4568: Input Parameters:
4569: + type - name of the package, for example petsc or superlu
4570: . ftype - the type of factorization supported by the type
4571: - mtype - the matrix type that works with this type
4573: Output Parameters:
4574: + foundtype - `PETSC_TRUE` if the type was registered
4575: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4576: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4578: Level: developer
4580: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`
4581: @*/
4582: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat, MatFactorType, Mat *))
4583: {
4584: MatSolverTypeHolder next = MatSolverTypeHolders;
4585: PetscBool flg;
4586: MatSolverTypeForSpecifcType inext;
4588: PetscFunctionBegin;
4589: if (foundtype) *foundtype = PETSC_FALSE;
4590: if (foundmtype) *foundmtype = PETSC_FALSE;
4591: if (createfactor) *createfactor = NULL;
4593: if (type) {
4594: while (next) {
4595: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4596: if (flg) {
4597: if (foundtype) *foundtype = PETSC_TRUE;
4598: inext = next->handlers;
4599: while (inext) {
4600: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4601: if (flg) {
4602: if (foundmtype) *foundmtype = PETSC_TRUE;
4603: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4604: PetscFunctionReturn(PETSC_SUCCESS);
4605: }
4606: inext = inext->next;
4607: }
4608: }
4609: next = next->next;
4610: }
4611: } else {
4612: while (next) {
4613: inext = next->handlers;
4614: while (inext) {
4615: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4616: if (flg && inext->createfactor[(int)ftype - 1]) {
4617: if (foundtype) *foundtype = PETSC_TRUE;
4618: if (foundmtype) *foundmtype = PETSC_TRUE;
4619: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4620: PetscFunctionReturn(PETSC_SUCCESS);
4621: }
4622: inext = inext->next;
4623: }
4624: next = next->next;
4625: }
4626: /* try with base classes inext->mtype */
4627: next = MatSolverTypeHolders;
4628: while (next) {
4629: inext = next->handlers;
4630: while (inext) {
4631: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4632: if (flg && inext->createfactor[(int)ftype - 1]) {
4633: if (foundtype) *foundtype = PETSC_TRUE;
4634: if (foundmtype) *foundmtype = PETSC_TRUE;
4635: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4636: PetscFunctionReturn(PETSC_SUCCESS);
4637: }
4638: inext = inext->next;
4639: }
4640: next = next->next;
4641: }
4642: }
4643: PetscFunctionReturn(PETSC_SUCCESS);
4644: }
4646: PetscErrorCode MatSolverTypeDestroy(void)
4647: {
4648: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4649: MatSolverTypeForSpecifcType inext, iprev;
4651: PetscFunctionBegin;
4652: while (next) {
4653: PetscCall(PetscFree(next->name));
4654: inext = next->handlers;
4655: while (inext) {
4656: PetscCall(PetscFree(inext->mtype));
4657: iprev = inext;
4658: inext = inext->next;
4659: PetscCall(PetscFree(iprev));
4660: }
4661: prev = next;
4662: next = next->next;
4663: PetscCall(PetscFree(prev));
4664: }
4665: MatSolverTypeHolders = NULL;
4666: PetscFunctionReturn(PETSC_SUCCESS);
4667: }
4669: /*@C
4670: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4672: Logically Collective
4674: Input Parameter:
4675: . mat - the matrix
4677: Output Parameter:
4678: . flg - `PETSC_TRUE` if uses the ordering
4680: Level: developer
4682: Note:
4683: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4684: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4686: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4687: @*/
4688: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4689: {
4690: PetscFunctionBegin;
4691: *flg = mat->canuseordering;
4692: PetscFunctionReturn(PETSC_SUCCESS);
4693: }
4695: /*@C
4696: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4698: Logically Collective
4700: Input Parameters:
4701: + mat - the matrix obtained with `MatGetFactor()`
4702: - ftype - the factorization type to be used
4704: Output Parameter:
4705: . otype - the preferred ordering type
4707: Level: developer
4709: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4710: @*/
4711: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4712: {
4713: PetscFunctionBegin;
4714: *otype = mat->preferredordering[ftype];
4715: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4716: PetscFunctionReturn(PETSC_SUCCESS);
4717: }
4719: /*@C
4720: MatGetFactor - Returns a matrix suitable to calls to MatXXFactorSymbolic()
4722: Collective
4724: Input Parameters:
4725: + mat - the matrix
4726: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4727: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4729: Output Parameter:
4730: . f - the factor matrix used with MatXXFactorSymbolic() calls. Can be `NULL` in some cases, see notes below.
4732: Options Database Key:
4733: . -mat_factor_bind_factorization <host, device> - Where to do matrix factorization? Default is device (might consume more device memory.
4734: One can choose host to save device memory). Currently only supported with `MATSEQAIJCUSPARSE` matrices.
4736: Level: intermediate
4738: Notes:
4739: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4740: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4742: Users usually access the factorization solvers via `KSP`
4744: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4745: such as pastix, superlu, mumps etc.
4747: PETSc must have been ./configure to use the external solver, using the option --download-package
4749: Some of the packages have options for controlling the factorization, these are in the form -prefix_mat_packagename_packageoption
4750: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can set
4751: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4753: Developer Notes:
4754: This should actually be called `MatCreateFactor()` since it creates a new factor object
4756: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`,
4757: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4758: @*/
4759: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4760: {
4761: PetscBool foundtype, foundmtype;
4762: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4764: PetscFunctionBegin;
4768: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4769: MatCheckPreallocated(mat, 1);
4771: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4772: if (!foundtype) {
4773: if (type) {
4774: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4775: ((PetscObject)mat)->type_name, type);
4776: } else {
4777: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4778: }
4779: }
4780: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4781: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4783: PetscCall((*conv)(mat, ftype, f));
4784: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4785: PetscFunctionReturn(PETSC_SUCCESS);
4786: }
4788: /*@C
4789: MatGetFactorAvailable - Returns a a flag if matrix supports particular type and factor type
4791: Not Collective
4793: Input Parameters:
4794: + mat - the matrix
4795: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4796: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4798: Output Parameter:
4799: . flg - PETSC_TRUE if the factorization is available
4801: Level: intermediate
4803: Notes:
4804: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4805: such as pastix, superlu, mumps etc.
4807: PETSc must have been ./configure to use the external solver, using the option --download-package
4809: Developer Notes:
4810: This should actually be called MatCreateFactorAvailable() since MatGetFactor() creates a new factor object
4812: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4813: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4814: @*/
4815: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4816: {
4817: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4819: PetscFunctionBegin;
4822: PetscAssertPointer(flg, 4);
4824: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4825: MatCheckPreallocated(mat, 1);
4827: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4828: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4829: PetscFunctionReturn(PETSC_SUCCESS);
4830: }
4832: /*@
4833: MatDuplicate - Duplicates a matrix including the non-zero structure.
4835: Collective
4837: Input Parameters:
4838: + mat - the matrix
4839: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4840: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4842: Output Parameter:
4843: . M - pointer to place new matrix
4845: Level: intermediate
4847: Notes:
4848: You cannot change the nonzero pattern for the parent or child matrix if you use `MAT_SHARE_NONZERO_PATTERN`.
4850: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4852: When original mat is a product of matrix operation, e.g., an output of `MatMatMult()` or `MatCreateSubMatrix()`, only the simple matrix data structure of mat
4853: is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated.
4854: User should not use `MatDuplicate()` to create new matrix M if M is intended to be reused as the product of matrix operation.
4856: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4857: @*/
4858: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4859: {
4860: Mat B;
4861: VecType vtype;
4862: PetscInt i;
4863: PetscObject dm, container_h, container_d;
4864: void (*viewf)(void);
4866: PetscFunctionBegin;
4869: PetscAssertPointer(M, 3);
4870: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4871: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4872: MatCheckPreallocated(mat, 1);
4874: *M = NULL;
4875: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4876: PetscUseTypeMethod(mat, duplicate, op, M);
4877: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4878: B = *M;
4880: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4881: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4882: PetscCall(MatGetVecType(mat, &vtype));
4883: PetscCall(MatSetVecType(B, vtype));
4885: B->stencil.dim = mat->stencil.dim;
4886: B->stencil.noc = mat->stencil.noc;
4887: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4888: B->stencil.dims[i] = mat->stencil.dims[i];
4889: B->stencil.starts[i] = mat->stencil.starts[i];
4890: }
4892: B->nooffproczerorows = mat->nooffproczerorows;
4893: B->nooffprocentries = mat->nooffprocentries;
4895: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4896: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4897: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4898: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4899: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4900: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4901: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4902: PetscFunctionReturn(PETSC_SUCCESS);
4903: }
4905: /*@
4906: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4908: Logically Collective
4910: Input Parameter:
4911: . mat - the matrix
4913: Output Parameter:
4914: . v - the diagonal of the matrix
4916: Level: intermediate
4918: Note:
4919: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
4920: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
4921: is larger than `ndiag`, the values of the remaining entries are unspecified.
4923: Currently only correct in parallel for square matrices.
4925: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
4926: @*/
4927: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
4928: {
4929: PetscFunctionBegin;
4933: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4934: MatCheckPreallocated(mat, 1);
4935: if (PetscDefined(USE_DEBUG)) {
4936: PetscInt nv, row, col, ndiag;
4938: PetscCall(VecGetLocalSize(v, &nv));
4939: PetscCall(MatGetLocalSize(mat, &row, &col));
4940: ndiag = PetscMin(row, col);
4941: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
4942: }
4944: PetscUseTypeMethod(mat, getdiagonal, v);
4945: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4946: PetscFunctionReturn(PETSC_SUCCESS);
4947: }
4949: /*@C
4950: MatGetRowMin - Gets the minimum value (of the real part) of each
4951: row of the matrix
4953: Logically Collective
4955: Input Parameter:
4956: . mat - the matrix
4958: Output Parameters:
4959: + v - the vector for storing the maximums
4960: - idx - the indices of the column found for each row (optional)
4962: Level: intermediate
4964: Note:
4965: The result of this call are the same as if one converted the matrix to dense format
4966: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4968: This code is only implemented for a couple of matrix formats.
4970: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
4971: `MatGetRowMax()`
4972: @*/
4973: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
4974: {
4975: PetscFunctionBegin;
4979: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4981: if (!mat->cmap->N) {
4982: PetscCall(VecSet(v, PETSC_MAX_REAL));
4983: if (idx) {
4984: PetscInt i, m = mat->rmap->n;
4985: for (i = 0; i < m; i++) idx[i] = -1;
4986: }
4987: } else {
4988: MatCheckPreallocated(mat, 1);
4989: }
4990: PetscUseTypeMethod(mat, getrowmin, v, idx);
4991: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4992: PetscFunctionReturn(PETSC_SUCCESS);
4993: }
4995: /*@C
4996: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
4997: row of the matrix
4999: Logically Collective
5001: Input Parameter:
5002: . mat - the matrix
5004: Output Parameters:
5005: + v - the vector for storing the minimums
5006: - idx - the indices of the column found for each row (or `NULL` if not needed)
5008: Level: intermediate
5010: Notes:
5011: if a row is completely empty or has only 0.0 values then the idx[] value for that
5012: row is 0 (the first column).
5014: This code is only implemented for a couple of matrix formats.
5016: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5017: @*/
5018: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5019: {
5020: PetscFunctionBegin;
5024: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5025: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5027: if (!mat->cmap->N) {
5028: PetscCall(VecSet(v, 0.0));
5029: if (idx) {
5030: PetscInt i, m = mat->rmap->n;
5031: for (i = 0; i < m; i++) idx[i] = -1;
5032: }
5033: } else {
5034: MatCheckPreallocated(mat, 1);
5035: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5036: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5037: }
5038: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5039: PetscFunctionReturn(PETSC_SUCCESS);
5040: }
5042: /*@C
5043: MatGetRowMax - Gets the maximum value (of the real part) of each
5044: row of the matrix
5046: Logically Collective
5048: Input Parameter:
5049: . mat - the matrix
5051: Output Parameters:
5052: + v - the vector for storing the maximums
5053: - idx - the indices of the column found for each row (optional)
5055: Level: intermediate
5057: Notes:
5058: The result of this call are the same as if one converted the matrix to dense format
5059: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
5061: This code is only implemented for a couple of matrix formats.
5063: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5064: @*/
5065: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5066: {
5067: PetscFunctionBegin;
5071: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5073: if (!mat->cmap->N) {
5074: PetscCall(VecSet(v, PETSC_MIN_REAL));
5075: if (idx) {
5076: PetscInt i, m = mat->rmap->n;
5077: for (i = 0; i < m; i++) idx[i] = -1;
5078: }
5079: } else {
5080: MatCheckPreallocated(mat, 1);
5081: PetscUseTypeMethod(mat, getrowmax, v, idx);
5082: }
5083: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5084: PetscFunctionReturn(PETSC_SUCCESS);
5085: }
5087: /*@C
5088: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5089: row of the matrix
5091: Logically Collective
5093: Input Parameter:
5094: . mat - the matrix
5096: Output Parameters:
5097: + v - the vector for storing the maximums
5098: - idx - the indices of the column found for each row (or `NULL` if not needed)
5100: Level: intermediate
5102: Notes:
5103: if a row is completely empty or has only 0.0 values then the idx[] value for that
5104: row is 0 (the first column).
5106: This code is only implemented for a couple of matrix formats.
5108: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5109: @*/
5110: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5111: {
5112: PetscFunctionBegin;
5116: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5118: if (!mat->cmap->N) {
5119: PetscCall(VecSet(v, 0.0));
5120: if (idx) {
5121: PetscInt i, m = mat->rmap->n;
5122: for (i = 0; i < m; i++) idx[i] = -1;
5123: }
5124: } else {
5125: MatCheckPreallocated(mat, 1);
5126: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5127: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5128: }
5129: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5130: PetscFunctionReturn(PETSC_SUCCESS);
5131: }
5133: /*@
5134: MatGetRowSum - Gets the sum of each row of the matrix
5136: Logically or Neighborhood Collective
5138: Input Parameter:
5139: . mat - the matrix
5141: Output Parameter:
5142: . v - the vector for storing the sum of rows
5144: Level: intermediate
5146: Notes:
5147: This code is slow since it is not currently specialized for different formats
5149: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`
5150: @*/
5151: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5152: {
5153: Vec ones;
5155: PetscFunctionBegin;
5159: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5160: MatCheckPreallocated(mat, 1);
5161: PetscCall(MatCreateVecs(mat, &ones, NULL));
5162: PetscCall(VecSet(ones, 1.));
5163: PetscCall(MatMult(mat, ones, v));
5164: PetscCall(VecDestroy(&ones));
5165: PetscFunctionReturn(PETSC_SUCCESS);
5166: }
5168: /*@
5169: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5170: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5172: Collective
5174: Input Parameter:
5175: . mat - the matrix to provide the transpose
5177: Output Parameter:
5178: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5180: Level: advanced
5182: Note:
5183: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5184: routine allows bypassing that call.
5186: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5187: @*/
5188: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5189: {
5190: PetscContainer rB = NULL;
5191: MatParentState *rb = NULL;
5193: PetscFunctionBegin;
5194: PetscCall(PetscNew(&rb));
5195: rb->id = ((PetscObject)mat)->id;
5196: rb->state = 0;
5197: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5198: PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)B), &rB));
5199: PetscCall(PetscContainerSetPointer(rB, rb));
5200: PetscCall(PetscContainerSetUserDestroy(rB, PetscContainerUserDestroyDefault));
5201: PetscCall(PetscObjectCompose((PetscObject)B, "MatTransposeParent", (PetscObject)rB));
5202: PetscCall(PetscObjectDereference((PetscObject)rB));
5203: PetscFunctionReturn(PETSC_SUCCESS);
5204: }
5206: /*@
5207: MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5209: Collective
5211: Input Parameters:
5212: + mat - the matrix to transpose
5213: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5215: Output Parameter:
5216: . B - the transpose
5218: Level: intermediate
5220: Notes:
5221: If you use `MAT_INPLACE_MATRIX` then you must pass in &mat for B
5223: `MAT_REUSE_MATRIX` uses the B matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5224: transpose, call `MatTransposeSetPrecursor`(mat,B) before calling this routine.
5226: If the nonzero structure of mat changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5228: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5230: If mat is unchanged from the last call this function returns immediately without recomputing the result
5232: If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`
5234: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5235: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5236: @*/
5237: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5238: {
5239: PetscContainer rB = NULL;
5240: MatParentState *rb = NULL;
5242: PetscFunctionBegin;
5245: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5246: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5247: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5248: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5249: MatCheckPreallocated(mat, 1);
5250: if (reuse == MAT_REUSE_MATRIX) {
5251: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5252: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5253: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5254: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5255: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5256: }
5258: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5259: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5260: PetscUseTypeMethod(mat, transpose, reuse, B);
5261: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5262: }
5263: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5265: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5266: if (reuse != MAT_INPLACE_MATRIX) {
5267: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5268: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5269: rb->state = ((PetscObject)mat)->state;
5270: rb->nonzerostate = mat->nonzerostate;
5271: }
5272: PetscFunctionReturn(PETSC_SUCCESS);
5273: }
5275: /*@
5276: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5278: Collective
5280: Input Parameter:
5281: . A - the matrix to transpose
5283: Output Parameter:
5284: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5285: numerical portion.
5287: Level: intermediate
5289: Note:
5290: This is not supported for many matrix types, use `MatTranspose()` in those cases
5292: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5293: @*/
5294: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5295: {
5296: PetscFunctionBegin;
5299: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5300: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5301: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5302: PetscUseTypeMethod(A, transposesymbolic, B);
5303: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5305: PetscCall(MatTransposeSetPrecursor(A, *B));
5306: PetscFunctionReturn(PETSC_SUCCESS);
5307: }
5309: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5310: {
5311: PetscContainer rB;
5312: MatParentState *rb;
5314: PetscFunctionBegin;
5317: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5318: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5319: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5320: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5321: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5322: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5323: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5324: PetscFunctionReturn(PETSC_SUCCESS);
5325: }
5327: /*@
5328: MatIsTranspose - Test whether a matrix is another one's transpose,
5329: or its own, in which case it tests symmetry.
5331: Collective
5333: Input Parameters:
5334: + A - the matrix to test
5335: . B - the matrix to test against, this can equal the first parameter
5336: - tol - tolerance, differences between entries smaller than this are counted as zero
5338: Output Parameter:
5339: . flg - the result
5341: Level: intermediate
5343: Notes:
5344: Only available for `MATAIJ` matrices.
5346: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5347: test involves parallel copies of the block-offdiagonal parts of the matrix.
5349: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5350: @*/
5351: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5352: {
5353: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5355: PetscFunctionBegin;
5358: PetscAssertPointer(flg, 4);
5359: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5360: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5361: *flg = PETSC_FALSE;
5362: if (f && g) {
5363: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5364: PetscCall((*f)(A, B, tol, flg));
5365: } else {
5366: MatType mattype;
5368: PetscCall(MatGetType(f ? B : A, &mattype));
5369: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5370: }
5371: PetscFunctionReturn(PETSC_SUCCESS);
5372: }
5374: /*@
5375: MatHermitianTranspose - Computes an in-place or out-of-place Hermitian transpose of a matrix in complex conjugate.
5377: Collective
5379: Input Parameters:
5380: + mat - the matrix to transpose and complex conjugate
5381: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5383: Output Parameter:
5384: . B - the Hermitian transpose
5386: Level: intermediate
5388: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5389: @*/
5390: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5391: {
5392: PetscFunctionBegin;
5393: PetscCall(MatTranspose(mat, reuse, B));
5394: #if defined(PETSC_USE_COMPLEX)
5395: PetscCall(MatConjugate(*B));
5396: #endif
5397: PetscFunctionReturn(PETSC_SUCCESS);
5398: }
5400: /*@
5401: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose,
5403: Collective
5405: Input Parameters:
5406: + A - the matrix to test
5407: . B - the matrix to test against, this can equal the first parameter
5408: - tol - tolerance, differences between entries smaller than this are counted as zero
5410: Output Parameter:
5411: . flg - the result
5413: Level: intermediate
5415: Notes:
5416: Only available for `MATAIJ` matrices.
5418: The sequential algorithm
5419: has a running time of the order of the number of nonzeros; the parallel
5420: test involves parallel copies of the block-offdiagonal parts of the matrix.
5422: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5423: @*/
5424: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5425: {
5426: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5428: PetscFunctionBegin;
5431: PetscAssertPointer(flg, 4);
5432: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5433: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5434: if (f && g) {
5435: PetscCheck(f != g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5436: PetscCall((*f)(A, B, tol, flg));
5437: }
5438: PetscFunctionReturn(PETSC_SUCCESS);
5439: }
5441: /*@
5442: MatPermute - Creates a new matrix with rows and columns permuted from the
5443: original.
5445: Collective
5447: Input Parameters:
5448: + mat - the matrix to permute
5449: . row - row permutation, each processor supplies only the permutation for its rows
5450: - col - column permutation, each processor supplies only the permutation for its columns
5452: Output Parameter:
5453: . B - the permuted matrix
5455: Level: advanced
5457: Note:
5458: The index sets map from row/col of permuted matrix to row/col of original matrix.
5459: The index sets should be on the same communicator as mat and have the same local sizes.
5461: Developer Notes:
5462: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5463: exploit the fact that row and col are permutations, consider implementing the
5464: more general `MatCreateSubMatrix()` instead.
5466: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5467: @*/
5468: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5469: {
5470: PetscFunctionBegin;
5475: PetscAssertPointer(B, 4);
5476: PetscCheckSameComm(mat, 1, row, 2);
5477: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5478: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5479: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5480: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5481: MatCheckPreallocated(mat, 1);
5483: if (mat->ops->permute) {
5484: PetscUseTypeMethod(mat, permute, row, col, B);
5485: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5486: } else {
5487: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5488: }
5489: PetscFunctionReturn(PETSC_SUCCESS);
5490: }
5492: /*@
5493: MatEqual - Compares two matrices.
5495: Collective
5497: Input Parameters:
5498: + A - the first matrix
5499: - B - the second matrix
5501: Output Parameter:
5502: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5504: Level: intermediate
5506: .seealso: [](ch_matrices), `Mat`
5507: @*/
5508: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5509: {
5510: PetscFunctionBegin;
5515: PetscAssertPointer(flg, 3);
5516: PetscCheckSameComm(A, 1, B, 2);
5517: MatCheckPreallocated(A, 1);
5518: MatCheckPreallocated(B, 2);
5519: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5520: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5521: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5522: B->cmap->N);
5523: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5524: PetscUseTypeMethod(A, equal, B, flg);
5525: } else {
5526: PetscCall(MatMultEqual(A, B, 10, flg));
5527: }
5528: PetscFunctionReturn(PETSC_SUCCESS);
5529: }
5531: /*@
5532: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5533: matrices that are stored as vectors. Either of the two scaling
5534: matrices can be `NULL`.
5536: Collective
5538: Input Parameters:
5539: + mat - the matrix to be scaled
5540: . l - the left scaling vector (or `NULL`)
5541: - r - the right scaling vector (or `NULL`)
5543: Level: intermediate
5545: Note:
5546: `MatDiagonalScale()` computes A = LAR, where
5547: L = a diagonal matrix (stored as a vector), R = a diagonal matrix (stored as a vector)
5548: The L scales the rows of the matrix, the R scales the columns of the matrix.
5550: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5551: @*/
5552: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5553: {
5554: PetscFunctionBegin;
5557: if (l) {
5559: PetscCheckSameComm(mat, 1, l, 2);
5560: }
5561: if (r) {
5563: PetscCheckSameComm(mat, 1, r, 3);
5564: }
5565: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5566: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5567: MatCheckPreallocated(mat, 1);
5568: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5570: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5571: PetscUseTypeMethod(mat, diagonalscale, l, r);
5572: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5573: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5574: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5575: PetscFunctionReturn(PETSC_SUCCESS);
5576: }
5578: /*@
5579: MatScale - Scales all elements of a matrix by a given number.
5581: Logically Collective
5583: Input Parameters:
5584: + mat - the matrix to be scaled
5585: - a - the scaling value
5587: Level: intermediate
5589: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5590: @*/
5591: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5592: {
5593: PetscFunctionBegin;
5596: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5597: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5599: MatCheckPreallocated(mat, 1);
5601: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5602: if (a != (PetscScalar)1.0) {
5603: PetscUseTypeMethod(mat, scale, a);
5604: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5605: }
5606: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5607: PetscFunctionReturn(PETSC_SUCCESS);
5608: }
5610: /*@
5611: MatNorm - Calculates various norms of a matrix.
5613: Collective
5615: Input Parameters:
5616: + mat - the matrix
5617: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5619: Output Parameter:
5620: . nrm - the resulting norm
5622: Level: intermediate
5624: .seealso: [](ch_matrices), `Mat`
5625: @*/
5626: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5627: {
5628: PetscFunctionBegin;
5631: PetscAssertPointer(nrm, 3);
5633: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5634: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5635: MatCheckPreallocated(mat, 1);
5637: PetscUseTypeMethod(mat, norm, type, nrm);
5638: PetscFunctionReturn(PETSC_SUCCESS);
5639: }
5641: /*
5642: This variable is used to prevent counting of MatAssemblyBegin() that
5643: are called from within a MatAssemblyEnd().
5644: */
5645: static PetscInt MatAssemblyEnd_InUse = 0;
5646: /*@
5647: MatAssemblyBegin - Begins assembling the matrix. This routine should
5648: be called after completing all calls to `MatSetValues()`.
5650: Collective
5652: Input Parameters:
5653: + mat - the matrix
5654: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5656: Level: beginner
5658: Notes:
5659: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5660: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5662: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5663: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5664: using the matrix.
5666: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5667: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5668: a global collective operation requiring all processes that share the matrix.
5670: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine are compressed
5671: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5672: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5674: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5675: @*/
5676: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5677: {
5678: PetscFunctionBegin;
5681: MatCheckPreallocated(mat, 1);
5682: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix.\nDid you forget to call MatSetUnfactored()?");
5683: if (mat->assembled) {
5684: mat->was_assembled = PETSC_TRUE;
5685: mat->assembled = PETSC_FALSE;
5686: }
5688: if (!MatAssemblyEnd_InUse) {
5689: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5690: PetscTryTypeMethod(mat, assemblybegin, type);
5691: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5692: } else PetscTryTypeMethod(mat, assemblybegin, type);
5693: PetscFunctionReturn(PETSC_SUCCESS);
5694: }
5696: /*@
5697: MatAssembled - Indicates if a matrix has been assembled and is ready for
5698: use; for example, in matrix-vector product.
5700: Not Collective
5702: Input Parameter:
5703: . mat - the matrix
5705: Output Parameter:
5706: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5708: Level: advanced
5710: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5711: @*/
5712: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5713: {
5714: PetscFunctionBegin;
5716: PetscAssertPointer(assembled, 2);
5717: *assembled = mat->assembled;
5718: PetscFunctionReturn(PETSC_SUCCESS);
5719: }
5721: /*@
5722: MatAssemblyEnd - Completes assembling the matrix. This routine should
5723: be called after `MatAssemblyBegin()`.
5725: Collective
5727: Input Parameters:
5728: + mat - the matrix
5729: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5731: Options Database Keys:
5732: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5733: . -mat_view ::ascii_info_detail - Prints more detailed info
5734: . -mat_view - Prints matrix in ASCII format
5735: . -mat_view ::ascii_matlab - Prints matrix in Matlab format
5736: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5737: . -display <name> - Sets display name (default is host)
5738: . -draw_pause <sec> - Sets number of seconds to pause after display
5739: . -mat_view socket - Sends matrix to socket, can be accessed from Matlab (See [Using MATLAB with PETSc](ch_matlab))
5740: . -viewer_socket_machine <machine> - Machine to use for socket
5741: . -viewer_socket_port <port> - Port number to use for socket
5742: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5744: Level: beginner
5746: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5747: @*/
5748: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5749: {
5750: static PetscInt inassm = 0;
5751: PetscBool flg = PETSC_FALSE;
5753: PetscFunctionBegin;
5757: inassm++;
5758: MatAssemblyEnd_InUse++;
5759: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5760: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5761: PetscTryTypeMethod(mat, assemblyend, type);
5762: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5763: } else PetscTryTypeMethod(mat, assemblyend, type);
5765: /* Flush assembly is not a true assembly */
5766: if (type != MAT_FLUSH_ASSEMBLY) {
5767: if (mat->num_ass) {
5768: if (!mat->symmetry_eternal) {
5769: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5770: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5771: }
5772: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5773: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5774: }
5775: mat->num_ass++;
5776: mat->assembled = PETSC_TRUE;
5777: mat->ass_nonzerostate = mat->nonzerostate;
5778: }
5780: mat->insertmode = NOT_SET_VALUES;
5781: MatAssemblyEnd_InUse--;
5782: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5783: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5784: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5786: if (mat->checksymmetryonassembly) {
5787: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5788: if (flg) {
5789: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5790: } else {
5791: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5792: }
5793: }
5794: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5795: }
5796: inassm--;
5797: PetscFunctionReturn(PETSC_SUCCESS);
5798: }
5800: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5801: /*@
5802: MatSetOption - Sets a parameter option for a matrix. Some options
5803: may be specific to certain storage formats. Some options
5804: determine how values will be inserted (or added). Sorted,
5805: row-oriented input will generally assemble the fastest. The default
5806: is row-oriented.
5808: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5810: Input Parameters:
5811: + mat - the matrix
5812: . op - the option, one of those listed below (and possibly others),
5813: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5815: Options Describing Matrix Structure:
5816: + `MAT_SPD` - symmetric positive definite
5817: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5818: . `MAT_HERMITIAN` - transpose is the complex conjugation
5819: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5820: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5821: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5822: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5824: These are not really options of the matrix, they are knowledge about the structure of the matrix that users may provide so that they
5825: do not need to be computed (usually at a high cost)
5827: Options For Use with `MatSetValues()`:
5828: Insert a logically dense subblock, which can be
5829: . `MAT_ROW_ORIENTED` - row-oriented (default)
5831: These options reflect the data you pass in with `MatSetValues()`; it has
5832: nothing to do with how the data is stored internally in the matrix
5833: data structure.
5835: When (re)assembling a matrix, we can restrict the input for
5836: efficiency/debugging purposes. These options include
5837: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5838: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5839: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5840: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5841: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5842: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5843: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5844: performance for very large process counts.
5845: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5846: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5847: functions, instead sending only neighbor messages.
5849: Level: intermediate
5851: Notes:
5852: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5854: Some options are relevant only for particular matrix types and
5855: are thus ignored by others. Other options are not supported by
5856: certain matrix types and will generate an error message if set.
5858: If using Fortran to compute a matrix, one may need to
5859: use the column-oriented option (or convert to the row-oriented
5860: format).
5862: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5863: that would generate a new entry in the nonzero structure is instead
5864: ignored. Thus, if memory has not already been allocated for this particular
5865: data, then the insertion is ignored. For dense matrices, in which
5866: the entire array is allocated, no entries are ever ignored.
5867: Set after the first `MatAssemblyEnd()`. If this option is set then the MatAssemblyBegin/End() processes has one less global reduction
5869: `MAT_NEW_NONZERO_LOCATION_ERR` set to PETSC_TRUE indicates that any add or insertion
5870: that would generate a new entry in the nonzero structure instead produces
5871: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
5873: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5874: that would generate a new entry that has not been preallocated will
5875: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5876: only.) This is a useful flag when debugging matrix memory preallocation.
5877: If this option is set then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processes has one less global reduction
5879: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
5880: other processors should be dropped, rather than stashed.
5881: This is useful if you know that the "owning" processor is also
5882: always generating the correct matrix entries, so that PETSc need
5883: not transfer duplicate entries generated on another processor.
5885: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
5886: searches during matrix assembly. When this flag is set, the hash table
5887: is created during the first matrix assembly. This hash table is
5888: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
5889: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
5890: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
5891: supported by `MATMPIBAIJ` format only.
5893: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
5894: are kept in the nonzero structure
5896: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
5897: a zero location in the matrix
5899: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
5901: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
5902: zero row routines and thus improves performance for very large process counts.
5904: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
5905: part of the matrix (since they should match the upper triangular part).
5907: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
5908: single call to `MatSetValues()`, preallocation is perfect, row oriented, `INSERT_VALUES` is used. Common
5909: with finite difference schemes with non-periodic boundary conditions.
5911: Developer Notes:
5912: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
5913: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
5914: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
5915: not changed.
5917: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
5918: @*/
5919: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
5920: {
5921: PetscFunctionBegin;
5923: if (op > 0) {
5926: }
5928: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
5930: switch (op) {
5931: case MAT_FORCE_DIAGONAL_ENTRIES:
5932: mat->force_diagonals = flg;
5933: PetscFunctionReturn(PETSC_SUCCESS);
5934: case MAT_NO_OFF_PROC_ENTRIES:
5935: mat->nooffprocentries = flg;
5936: PetscFunctionReturn(PETSC_SUCCESS);
5937: case MAT_SUBSET_OFF_PROC_ENTRIES:
5938: mat->assembly_subset = flg;
5939: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
5940: #if !defined(PETSC_HAVE_MPIUNI)
5941: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
5942: #endif
5943: mat->stash.first_assembly_done = PETSC_FALSE;
5944: }
5945: PetscFunctionReturn(PETSC_SUCCESS);
5946: case MAT_NO_OFF_PROC_ZERO_ROWS:
5947: mat->nooffproczerorows = flg;
5948: PetscFunctionReturn(PETSC_SUCCESS);
5949: case MAT_SPD:
5950: if (flg) {
5951: mat->spd = PETSC_BOOL3_TRUE;
5952: mat->symmetric = PETSC_BOOL3_TRUE;
5953: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5954: } else {
5955: mat->spd = PETSC_BOOL3_FALSE;
5956: }
5957: break;
5958: case MAT_SYMMETRIC:
5959: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5960: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5961: #if !defined(PETSC_USE_COMPLEX)
5962: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5963: #endif
5964: break;
5965: case MAT_HERMITIAN:
5966: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5967: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5968: #if !defined(PETSC_USE_COMPLEX)
5969: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5970: #endif
5971: break;
5972: case MAT_STRUCTURALLY_SYMMETRIC:
5973: mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5974: break;
5975: case MAT_SYMMETRY_ETERNAL:
5976: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
5977: mat->symmetry_eternal = flg;
5978: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
5979: break;
5980: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
5981: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
5982: mat->structural_symmetry_eternal = flg;
5983: break;
5984: case MAT_SPD_ETERNAL:
5985: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
5986: mat->spd_eternal = flg;
5987: if (flg) {
5988: mat->structural_symmetry_eternal = PETSC_TRUE;
5989: mat->symmetry_eternal = PETSC_TRUE;
5990: }
5991: break;
5992: case MAT_STRUCTURE_ONLY:
5993: mat->structure_only = flg;
5994: break;
5995: case MAT_SORTED_FULL:
5996: mat->sortedfull = flg;
5997: break;
5998: default:
5999: break;
6000: }
6001: PetscTryTypeMethod(mat, setoption, op, flg);
6002: PetscFunctionReturn(PETSC_SUCCESS);
6003: }
6005: /*@
6006: MatGetOption - Gets a parameter option that has been set for a matrix.
6008: Logically Collective
6010: Input Parameters:
6011: + mat - the matrix
6012: - op - the option, this only responds to certain options, check the code for which ones
6014: Output Parameter:
6015: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6017: Level: intermediate
6019: Notes:
6020: Can only be called after `MatSetSizes()` and `MatSetType()` have been set.
6022: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6023: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6025: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6026: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6027: @*/
6028: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6029: {
6030: PetscFunctionBegin;
6034: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6035: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6037: switch (op) {
6038: case MAT_NO_OFF_PROC_ENTRIES:
6039: *flg = mat->nooffprocentries;
6040: break;
6041: case MAT_NO_OFF_PROC_ZERO_ROWS:
6042: *flg = mat->nooffproczerorows;
6043: break;
6044: case MAT_SYMMETRIC:
6045: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6046: break;
6047: case MAT_HERMITIAN:
6048: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6049: break;
6050: case MAT_STRUCTURALLY_SYMMETRIC:
6051: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6052: break;
6053: case MAT_SPD:
6054: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6055: break;
6056: case MAT_SYMMETRY_ETERNAL:
6057: *flg = mat->symmetry_eternal;
6058: break;
6059: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6060: *flg = mat->symmetry_eternal;
6061: break;
6062: default:
6063: break;
6064: }
6065: PetscFunctionReturn(PETSC_SUCCESS);
6066: }
6068: /*@
6069: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6070: this routine retains the old nonzero structure.
6072: Logically Collective
6074: Input Parameter:
6075: . mat - the matrix
6077: Level: intermediate
6079: Note:
6080: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6081: See the Performance chapter of the users manual for information on preallocating matrices.
6083: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6084: @*/
6085: PetscErrorCode MatZeroEntries(Mat mat)
6086: {
6087: PetscFunctionBegin;
6090: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6091: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6092: MatCheckPreallocated(mat, 1);
6094: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6095: PetscUseTypeMethod(mat, zeroentries);
6096: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6097: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6098: PetscFunctionReturn(PETSC_SUCCESS);
6099: }
6101: /*@
6102: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6103: of a set of rows and columns of a matrix.
6105: Collective
6107: Input Parameters:
6108: + mat - the matrix
6109: . numRows - the number of rows/columns to zero
6110: . rows - the global row indices
6111: . diag - value put in the diagonal of the eliminated rows
6112: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6113: - b - optional vector of the right hand side, that will be adjusted by provided solution entries
6115: Level: intermediate
6117: Notes:
6118: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6120: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6121: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6123: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6124: Krylov method to take advantage of the known solution on the zeroed rows.
6126: For the parallel case, all processes that share the matrix (i.e.,
6127: those in the communicator used for matrix creation) MUST call this
6128: routine, regardless of whether any rows being zeroed are owned by
6129: them.
6131: Unlike `MatZeroRows()` this does not change the nonzero structure of the matrix, it merely zeros those entries in the matrix.
6133: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6134: list only rows local to itself).
6136: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6138: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6139: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6140: @*/
6141: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6142: {
6143: PetscFunctionBegin;
6146: if (numRows) PetscAssertPointer(rows, 3);
6147: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6148: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6149: MatCheckPreallocated(mat, 1);
6151: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6152: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6153: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6154: PetscFunctionReturn(PETSC_SUCCESS);
6155: }
6157: /*@
6158: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6159: of a set of rows and columns of a matrix.
6161: Collective
6163: Input Parameters:
6164: + mat - the matrix
6165: . is - the rows to zero
6166: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6167: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6168: - b - optional vector of right hand side, that will be adjusted by provided solution
6170: Level: intermediate
6172: Note:
6173: See `MatZeroRowsColumns()` for details on how this routine operates.
6175: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6176: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6177: @*/
6178: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6179: {
6180: PetscInt numRows;
6181: const PetscInt *rows;
6183: PetscFunctionBegin;
6188: PetscCall(ISGetLocalSize(is, &numRows));
6189: PetscCall(ISGetIndices(is, &rows));
6190: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6191: PetscCall(ISRestoreIndices(is, &rows));
6192: PetscFunctionReturn(PETSC_SUCCESS);
6193: }
6195: /*@
6196: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6197: of a set of rows of a matrix.
6199: Collective
6201: Input Parameters:
6202: + mat - the matrix
6203: . numRows - the number of rows to zero
6204: . rows - the global row indices
6205: . diag - value put in the diagonal of the zeroed rows
6206: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6207: - b - optional vector of right hand side, that will be adjusted by provided solution entries
6209: Level: intermediate
6211: Notes:
6212: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6214: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6216: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6217: Krylov method to take advantage of the known solution on the zeroed rows.
6219: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns)
6220: from the matrix.
6222: Unlike `MatZeroRowsColumns()` for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure, from the eliminated rows of the matrix
6223: but does not release memory. Because of this removal matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6224: formats this does not alter the nonzero structure.
6226: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) the nonzero structure
6227: of the matrix is not changed the values are
6228: merely zeroed.
6230: The user can set a value in the diagonal entry (or for the `MATAIJ` format
6231: formats can optionally remove the main diagonal entry from the
6232: nonzero structure as well, by passing 0.0 as the final argument).
6234: For the parallel case, all processes that share the matrix (i.e.,
6235: those in the communicator used for matrix creation) MUST call this
6236: routine, regardless of whether any rows being zeroed are owned by
6237: them.
6239: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6240: list only rows local to itself).
6242: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6243: owns that are to be zeroed. This saves a global synchronization in the implementation.
6245: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6246: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`
6247: @*/
6248: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6249: {
6250: PetscFunctionBegin;
6253: if (numRows) PetscAssertPointer(rows, 3);
6254: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6255: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6256: MatCheckPreallocated(mat, 1);
6258: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6259: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6260: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6261: PetscFunctionReturn(PETSC_SUCCESS);
6262: }
6264: /*@
6265: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6266: of a set of rows of a matrix.
6268: Collective
6270: Input Parameters:
6271: + mat - the matrix
6272: . is - index set of rows to remove (if `NULL` then no row is removed)
6273: . diag - value put in all diagonals of eliminated rows
6274: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6275: - b - optional vector of right hand side, that will be adjusted by provided solution
6277: Level: intermediate
6279: Note:
6280: See `MatZeroRows()` for details on how this routine operates.
6282: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6283: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6284: @*/
6285: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6286: {
6287: PetscInt numRows = 0;
6288: const PetscInt *rows = NULL;
6290: PetscFunctionBegin;
6293: if (is) {
6295: PetscCall(ISGetLocalSize(is, &numRows));
6296: PetscCall(ISGetIndices(is, &rows));
6297: }
6298: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6299: if (is) PetscCall(ISRestoreIndices(is, &rows));
6300: PetscFunctionReturn(PETSC_SUCCESS);
6301: }
6303: /*@
6304: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6305: of a set of rows of a matrix. These rows must be local to the process.
6307: Collective
6309: Input Parameters:
6310: + mat - the matrix
6311: . numRows - the number of rows to remove
6312: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6313: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6314: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6315: - b - optional vector of right hand side, that will be adjusted by provided solution
6317: Level: intermediate
6319: Notes:
6320: See `MatZeroRows()` for details on how this routine operates.
6322: The grid coordinates are across the entire grid, not just the local portion
6324: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6325: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
6326: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
6327: `DM_BOUNDARY_PERIODIC` boundary type.
6329: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
6330: a single value per point) you can skip filling those indices.
6332: Fortran Notes:
6333: `idxm` and `idxn` should be declared as
6334: $ MatStencil idxm(4, m)
6335: and the values inserted using
6336: .vb
6337: idxm(MatStencil_i, 1) = i
6338: idxm(MatStencil_j, 1) = j
6339: idxm(MatStencil_k, 1) = k
6340: idxm(MatStencil_c, 1) = c
6341: etc
6342: .ve
6344: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsl()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6345: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6346: @*/
6347: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6348: {
6349: PetscInt dim = mat->stencil.dim;
6350: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6351: PetscInt *dims = mat->stencil.dims + 1;
6352: PetscInt *starts = mat->stencil.starts;
6353: PetscInt *dxm = (PetscInt *)rows;
6354: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6356: PetscFunctionBegin;
6359: if (numRows) PetscAssertPointer(rows, 3);
6361: PetscCall(PetscMalloc1(numRows, &jdxm));
6362: for (i = 0; i < numRows; ++i) {
6363: /* Skip unused dimensions (they are ordered k, j, i, c) */
6364: for (j = 0; j < 3 - sdim; ++j) dxm++;
6365: /* Local index in X dir */
6366: tmp = *dxm++ - starts[0];
6367: /* Loop over remaining dimensions */
6368: for (j = 0; j < dim - 1; ++j) {
6369: /* If nonlocal, set index to be negative */
6370: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6371: /* Update local index */
6372: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6373: }
6374: /* Skip component slot if necessary */
6375: if (mat->stencil.noc) dxm++;
6376: /* Local row number */
6377: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6378: }
6379: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6380: PetscCall(PetscFree(jdxm));
6381: PetscFunctionReturn(PETSC_SUCCESS);
6382: }
6384: /*@
6385: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6386: of a set of rows and columns of a matrix.
6388: Collective
6390: Input Parameters:
6391: + mat - the matrix
6392: . numRows - the number of rows/columns to remove
6393: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6394: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6395: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6396: - b - optional vector of right hand side, that will be adjusted by provided solution
6398: Level: intermediate
6400: Notes:
6401: See `MatZeroRowsColumns()` for details on how this routine operates.
6403: The grid coordinates are across the entire grid, not just the local portion
6405: For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6406: obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
6407: etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
6408: `DM_BOUNDARY_PERIODIC` boundary type.
6410: For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
6411: a single value per point) you can skip filling those indices.
6413: Fortran Notes:
6414: `idxm` and `idxn` should be declared as
6415: $ MatStencil idxm(4, m)
6416: and the values inserted using
6417: .vb
6418: idxm(MatStencil_i, 1) = i
6419: idxm(MatStencil_j, 1) = j
6420: idxm(MatStencil_k, 1) = k
6421: idxm(MatStencil_c, 1) = c
6422: etc
6423: .ve
6425: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6426: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6427: @*/
6428: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6429: {
6430: PetscInt dim = mat->stencil.dim;
6431: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6432: PetscInt *dims = mat->stencil.dims + 1;
6433: PetscInt *starts = mat->stencil.starts;
6434: PetscInt *dxm = (PetscInt *)rows;
6435: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6437: PetscFunctionBegin;
6440: if (numRows) PetscAssertPointer(rows, 3);
6442: PetscCall(PetscMalloc1(numRows, &jdxm));
6443: for (i = 0; i < numRows; ++i) {
6444: /* Skip unused dimensions (they are ordered k, j, i, c) */
6445: for (j = 0; j < 3 - sdim; ++j) dxm++;
6446: /* Local index in X dir */
6447: tmp = *dxm++ - starts[0];
6448: /* Loop over remaining dimensions */
6449: for (j = 0; j < dim - 1; ++j) {
6450: /* If nonlocal, set index to be negative */
6451: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6452: /* Update local index */
6453: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6454: }
6455: /* Skip component slot if necessary */
6456: if (mat->stencil.noc) dxm++;
6457: /* Local row number */
6458: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6459: }
6460: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6461: PetscCall(PetscFree(jdxm));
6462: PetscFunctionReturn(PETSC_SUCCESS);
6463: }
6465: /*@C
6466: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6467: of a set of rows of a matrix; using local numbering of rows.
6469: Collective
6471: Input Parameters:
6472: + mat - the matrix
6473: . numRows - the number of rows to remove
6474: . rows - the local row indices
6475: . diag - value put in all diagonals of eliminated rows
6476: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6477: - b - optional vector of right hand side, that will be adjusted by provided solution
6479: Level: intermediate
6481: Notes:
6482: Before calling `MatZeroRowsLocal()`, the user must first set the
6483: local-to-global mapping by calling MatSetLocalToGlobalMapping(), this is often already set for matrices obtained with `DMCreateMatrix()`.
6485: See `MatZeroRows()` for details on how this routine operates.
6487: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6488: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6489: @*/
6490: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6491: {
6492: PetscFunctionBegin;
6495: if (numRows) PetscAssertPointer(rows, 3);
6496: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6497: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6498: MatCheckPreallocated(mat, 1);
6500: if (mat->ops->zerorowslocal) {
6501: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6502: } else {
6503: IS is, newis;
6504: const PetscInt *newRows;
6506: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6507: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6508: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6509: PetscCall(ISGetIndices(newis, &newRows));
6510: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6511: PetscCall(ISRestoreIndices(newis, &newRows));
6512: PetscCall(ISDestroy(&newis));
6513: PetscCall(ISDestroy(&is));
6514: }
6515: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6516: PetscFunctionReturn(PETSC_SUCCESS);
6517: }
6519: /*@
6520: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6521: of a set of rows of a matrix; using local numbering of rows.
6523: Collective
6525: Input Parameters:
6526: + mat - the matrix
6527: . is - index set of rows to remove
6528: . diag - value put in all diagonals of eliminated rows
6529: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6530: - b - optional vector of right hand side, that will be adjusted by provided solution
6532: Level: intermediate
6534: Notes:
6535: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6536: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6538: See `MatZeroRows()` for details on how this routine operates.
6540: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6541: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6542: @*/
6543: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6544: {
6545: PetscInt numRows;
6546: const PetscInt *rows;
6548: PetscFunctionBegin;
6552: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6553: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6554: MatCheckPreallocated(mat, 1);
6556: PetscCall(ISGetLocalSize(is, &numRows));
6557: PetscCall(ISGetIndices(is, &rows));
6558: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6559: PetscCall(ISRestoreIndices(is, &rows));
6560: PetscFunctionReturn(PETSC_SUCCESS);
6561: }
6563: /*@
6564: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6565: of a set of rows and columns of a matrix; using local numbering of rows.
6567: Collective
6569: Input Parameters:
6570: + mat - the matrix
6571: . numRows - the number of rows to remove
6572: . rows - the global row indices
6573: . diag - value put in all diagonals of eliminated rows
6574: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6575: - b - optional vector of right hand side, that will be adjusted by provided solution
6577: Level: intermediate
6579: Notes:
6580: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6581: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6583: See `MatZeroRowsColumns()` for details on how this routine operates.
6585: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6586: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6587: @*/
6588: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6589: {
6590: IS is, newis;
6591: const PetscInt *newRows;
6593: PetscFunctionBegin;
6596: if (numRows) PetscAssertPointer(rows, 3);
6597: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6598: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6599: MatCheckPreallocated(mat, 1);
6601: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6602: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6603: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6604: PetscCall(ISGetIndices(newis, &newRows));
6605: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6606: PetscCall(ISRestoreIndices(newis, &newRows));
6607: PetscCall(ISDestroy(&newis));
6608: PetscCall(ISDestroy(&is));
6609: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6610: PetscFunctionReturn(PETSC_SUCCESS);
6611: }
6613: /*@
6614: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6615: of a set of rows and columns of a matrix; using local numbering of rows.
6617: Collective
6619: Input Parameters:
6620: + mat - the matrix
6621: . is - index set of rows to remove
6622: . diag - value put in all diagonals of eliminated rows
6623: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6624: - b - optional vector of right hand side, that will be adjusted by provided solution
6626: Level: intermediate
6628: Notes:
6629: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6630: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6632: See `MatZeroRowsColumns()` for details on how this routine operates.
6634: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6635: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6636: @*/
6637: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6638: {
6639: PetscInt numRows;
6640: const PetscInt *rows;
6642: PetscFunctionBegin;
6646: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6647: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6648: MatCheckPreallocated(mat, 1);
6650: PetscCall(ISGetLocalSize(is, &numRows));
6651: PetscCall(ISGetIndices(is, &rows));
6652: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6653: PetscCall(ISRestoreIndices(is, &rows));
6654: PetscFunctionReturn(PETSC_SUCCESS);
6655: }
6657: /*@C
6658: MatGetSize - Returns the numbers of rows and columns in a matrix.
6660: Not Collective
6662: Input Parameter:
6663: . mat - the matrix
6665: Output Parameters:
6666: + m - the number of global rows
6667: - n - the number of global columns
6669: Level: beginner
6671: Note:
6672: Both output parameters can be `NULL` on input.
6674: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6675: @*/
6676: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6677: {
6678: PetscFunctionBegin;
6680: if (m) *m = mat->rmap->N;
6681: if (n) *n = mat->cmap->N;
6682: PetscFunctionReturn(PETSC_SUCCESS);
6683: }
6685: /*@C
6686: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, Returns the number of local rows and local columns
6687: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6689: Not Collective
6691: Input Parameter:
6692: . mat - the matrix
6694: Output Parameters:
6695: + m - the number of local rows, use `NULL` to not obtain this value
6696: - n - the number of local columns, use `NULL` to not obtain this value
6698: Level: beginner
6700: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6701: @*/
6702: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6703: {
6704: PetscFunctionBegin;
6706: if (m) PetscAssertPointer(m, 2);
6707: if (n) PetscAssertPointer(n, 3);
6708: if (m) *m = mat->rmap->n;
6709: if (n) *n = mat->cmap->n;
6710: PetscFunctionReturn(PETSC_SUCCESS);
6711: }
6713: /*@C
6714: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a
6715: vector one multiplies this matrix by that are owned by this processor.
6717: Not Collective, unless matrix has not been allocated, then collective
6719: Input Parameter:
6720: . mat - the matrix
6722: Output Parameters:
6723: + m - the global index of the first local column, use `NULL` to not obtain this value
6724: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6726: Level: developer
6728: Notes:
6729: Retursns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6730: Layouts](sec_matlayout) for details on matrix layouts.
6732: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6733: @*/
6734: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6735: {
6736: PetscFunctionBegin;
6739: if (m) PetscAssertPointer(m, 2);
6740: if (n) PetscAssertPointer(n, 3);
6741: MatCheckPreallocated(mat, 1);
6742: if (m) *m = mat->cmap->rstart;
6743: if (n) *n = mat->cmap->rend;
6744: PetscFunctionReturn(PETSC_SUCCESS);
6745: }
6747: /*@C
6748: MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6749: this MPI process.
6751: Not Collective
6753: Input Parameter:
6754: . mat - the matrix
6756: Output Parameters:
6757: + m - the global index of the first local row, use `NULL` to not obtain this value
6758: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6760: Level: beginner
6762: Note:
6763: For all matrices it returns the range of matrix rows associated with rows of a vector that
6764: would contain the result of a matrix vector product with this matrix. See [Matrix
6765: Layouts](sec_matlayout) for details on matrix layouts.
6767: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`,
6768: `PetscLayout`
6769: @*/
6770: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6771: {
6772: PetscFunctionBegin;
6775: if (m) PetscAssertPointer(m, 2);
6776: if (n) PetscAssertPointer(n, 3);
6777: MatCheckPreallocated(mat, 1);
6778: if (m) *m = mat->rmap->rstart;
6779: if (n) *n = mat->rmap->rend;
6780: PetscFunctionReturn(PETSC_SUCCESS);
6781: }
6783: /*@C
6784: MatGetOwnershipRanges - For matrices that own values by row, excludes `MATELEMENTAL` and
6785: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6787: Not Collective, unless matrix has not been allocated
6789: Input Parameter:
6790: . mat - the matrix
6792: Output Parameter:
6793: . ranges - start of each processors portion plus one more than the total length at the end
6795: Level: beginner
6797: Notes:
6798: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6799: would contain the result of a matrix vector product with this matrix. See [Matrix
6800: Layouts](sec_matlayout) for details on matrix layouts.
6802: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6803: @*/
6804: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt **ranges)
6805: {
6806: PetscFunctionBegin;
6809: MatCheckPreallocated(mat, 1);
6810: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6811: PetscFunctionReturn(PETSC_SUCCESS);
6812: }
6814: /*@C
6815: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
6816: vector one multiplies this vector by that are owned by each processor.
6818: Not Collective, unless matrix has not been allocated
6820: Input Parameter:
6821: . mat - the matrix
6823: Output Parameter:
6824: . ranges - start of each processors portion plus one more then the total length at the end
6826: Level: beginner
6828: Notes:
6829: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6830: Layouts](sec_matlayout) for details on matrix layouts.
6832: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`
6833: @*/
6834: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt **ranges)
6835: {
6836: PetscFunctionBegin;
6839: MatCheckPreallocated(mat, 1);
6840: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
6841: PetscFunctionReturn(PETSC_SUCCESS);
6842: }
6844: /*@C
6845: MatGetOwnershipIS - Get row and column ownership of a matrices' values as index sets.
6847: Not Collective
6849: Input Parameter:
6850: . A - matrix
6852: Output Parameters:
6853: + rows - rows in which this process owns elements, , use `NULL` to not obtain this value
6854: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
6856: Level: intermediate
6858: Notes:
6859: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
6860: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
6861: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
6862: details on matrix layouts.
6864: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatSetValues()`, ``MATELEMENTAL``, ``MATSCALAPACK``
6865: @*/
6866: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
6867: {
6868: PetscErrorCode (*f)(Mat, IS *, IS *);
6870: PetscFunctionBegin;
6871: MatCheckPreallocated(A, 1);
6872: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
6873: if (f) {
6874: PetscCall((*f)(A, rows, cols));
6875: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6876: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
6877: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
6878: }
6879: PetscFunctionReturn(PETSC_SUCCESS);
6880: }
6882: /*@C
6883: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`
6884: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
6885: to complete the factorization.
6887: Collective
6889: Input Parameters:
6890: + fact - the factorized matrix obtained with `MatGetFactor()`
6891: . mat - the matrix
6892: . row - row permutation
6893: . col - column permutation
6894: - info - structure containing
6895: .vb
6896: levels - number of levels of fill.
6897: expected fill - as ratio of original fill.
6898: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
6899: missing diagonal entries)
6900: .ve
6902: Level: developer
6904: Notes:
6905: See [Matrix Factorization](sec_matfactor) for additional information.
6907: Most users should employ the `KSP` interface for linear solvers
6908: instead of working directly with matrix algebra routines such as this.
6909: See, e.g., `KSPCreate()`.
6911: Uses the definition of level of fill as in Y. Saad, 2003
6913: Developer Notes:
6914: The Fortran interface is not autogenerated as the
6915: interface definition cannot be generated correctly [due to `MatFactorInfo`]
6917: References:
6918: . * - Y. Saad, Iterative methods for sparse linear systems Philadelphia: Society for Industrial and Applied Mathematics, 2003
6920: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
6921: `MatGetOrdering()`, `MatFactorInfo`
6922: @*/
6923: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
6924: {
6925: PetscFunctionBegin;
6930: PetscAssertPointer(info, 5);
6931: PetscAssertPointer(fact, 1);
6932: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
6933: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
6934: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6935: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6936: MatCheckPreallocated(mat, 2);
6938: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
6939: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
6940: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
6941: PetscFunctionReturn(PETSC_SUCCESS);
6942: }
6944: /*@C
6945: MatICCFactorSymbolic - Performs symbolic incomplete
6946: Cholesky factorization for a symmetric matrix. Use
6947: `MatCholeskyFactorNumeric()` to complete the factorization.
6949: Collective
6951: Input Parameters:
6952: + fact - the factorized matrix obtained with `MatGetFactor()`
6953: . mat - the matrix to be factored
6954: . perm - row and column permutation
6955: - info - structure containing
6956: .vb
6957: levels - number of levels of fill.
6958: expected fill - as ratio of original fill.
6959: .ve
6961: Level: developer
6963: Notes:
6964: Most users should employ the `KSP` interface for linear solvers
6965: instead of working directly with matrix algebra routines such as this.
6966: See, e.g., `KSPCreate()`.
6968: This uses the definition of level of fill as in Y. Saad, 2003
6970: Developer Notes:
6971: The Fortran interface is not autogenerated as the
6972: interface definition cannot be generated correctly [due to `MatFactorInfo`]
6974: References:
6975: . * - Y. Saad, Iterative methods for sparse linear systems Philadelphia: Society for Industrial and Applied Mathematics, 2003
6977: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
6978: @*/
6979: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
6980: {
6981: PetscFunctionBegin;
6985: PetscAssertPointer(info, 4);
6986: PetscAssertPointer(fact, 1);
6987: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6988: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
6989: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
6990: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6991: MatCheckPreallocated(mat, 2);
6993: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
6994: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
6995: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
6996: PetscFunctionReturn(PETSC_SUCCESS);
6997: }
6999: /*@C
7000: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7001: points to an array of valid matrices, they may be reused to store the new
7002: submatrices.
7004: Collective
7006: Input Parameters:
7007: + mat - the matrix
7008: . n - the number of submatrixes to be extracted (on this processor, may be zero)
7009: . irow - index set of rows to extract
7010: . icol - index set of columns to extract
7011: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7013: Output Parameter:
7014: . submat - the array of submatrices
7016: Level: advanced
7018: Notes:
7019: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7020: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7021: to extract a parallel submatrix.
7023: Some matrix types place restrictions on the row and column
7024: indices, such as that they be sorted or that they be equal to each other.
7026: The index sets may not have duplicate entries.
7028: When extracting submatrices from a parallel matrix, each processor can
7029: form a different submatrix by setting the rows and columns of its
7030: individual index sets according to the local submatrix desired.
7032: When finished using the submatrices, the user should destroy
7033: them with `MatDestroySubMatrices()`.
7035: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7036: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7038: This routine creates the matrices in submat; you should NOT create them before
7039: calling it. It also allocates the array of matrix pointers submat.
7041: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7042: request one row/column in a block, they must request all rows/columns that are in
7043: that block. For example, if the block size is 2 you cannot request just row 0 and
7044: column 0.
7046: Fortran Notes:
7047: The Fortran interface is slightly different from that given below; it
7048: requires one to pass in as `submat` a `Mat` (integer) array of size at least n+1.
7050: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7051: @*/
7052: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7053: {
7054: PetscInt i;
7055: PetscBool eq;
7057: PetscFunctionBegin;
7060: if (n) {
7061: PetscAssertPointer(irow, 3);
7063: PetscAssertPointer(icol, 4);
7065: }
7066: PetscAssertPointer(submat, 6);
7067: if (n && scall == MAT_REUSE_MATRIX) {
7068: PetscAssertPointer(*submat, 6);
7070: }
7071: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7072: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7073: MatCheckPreallocated(mat, 1);
7074: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7075: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7076: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7077: for (i = 0; i < n; i++) {
7078: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7079: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7080: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7081: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7082: if (mat->boundtocpu && mat->bindingpropagates) {
7083: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7084: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7085: }
7086: #endif
7087: }
7088: PetscFunctionReturn(PETSC_SUCCESS);
7089: }
7091: /*@C
7092: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of `IS` that may live on subcomms).
7094: Collective
7096: Input Parameters:
7097: + mat - the matrix
7098: . n - the number of submatrixes to be extracted
7099: . irow - index set of rows to extract
7100: . icol - index set of columns to extract
7101: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7103: Output Parameter:
7104: . submat - the array of submatrices
7106: Level: advanced
7108: Note:
7109: This is used by `PCGASM`
7111: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7112: @*/
7113: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7114: {
7115: PetscInt i;
7116: PetscBool eq;
7118: PetscFunctionBegin;
7121: if (n) {
7122: PetscAssertPointer(irow, 3);
7124: PetscAssertPointer(icol, 4);
7126: }
7127: PetscAssertPointer(submat, 6);
7128: if (n && scall == MAT_REUSE_MATRIX) {
7129: PetscAssertPointer(*submat, 6);
7131: }
7132: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7133: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7134: MatCheckPreallocated(mat, 1);
7136: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7137: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7138: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7139: for (i = 0; i < n; i++) {
7140: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7141: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7142: }
7143: PetscFunctionReturn(PETSC_SUCCESS);
7144: }
7146: /*@C
7147: MatDestroyMatrices - Destroys an array of matrices.
7149: Collective
7151: Input Parameters:
7152: + n - the number of local matrices
7153: - mat - the matrices (this is a pointer to the array of matrices)
7155: Level: advanced
7157: Note:
7158: Frees not only the matrices, but also the array that contains the matrices
7160: Fortran Notes:
7161: This does not free the array.
7163: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()` `MatDestroySubMatrices()`
7164: @*/
7165: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7166: {
7167: PetscInt i;
7169: PetscFunctionBegin;
7170: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7171: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7172: PetscAssertPointer(mat, 2);
7174: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7176: /* memory is allocated even if n = 0 */
7177: PetscCall(PetscFree(*mat));
7178: PetscFunctionReturn(PETSC_SUCCESS);
7179: }
7181: /*@C
7182: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7184: Collective
7186: Input Parameters:
7187: + n - the number of local matrices
7188: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7189: sequence of `MatCreateSubMatrices()`)
7191: Level: advanced
7193: Note:
7194: Frees not only the matrices, but also the array that contains the matrices
7196: Fortran Notes:
7197: This does not free the array.
7199: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7200: @*/
7201: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7202: {
7203: Mat mat0;
7205: PetscFunctionBegin;
7206: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7207: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7208: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7209: PetscAssertPointer(mat, 2);
7211: mat0 = (*mat)[0];
7212: if (mat0 && mat0->ops->destroysubmatrices) {
7213: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7214: } else {
7215: PetscCall(MatDestroyMatrices(n, mat));
7216: }
7217: PetscFunctionReturn(PETSC_SUCCESS);
7218: }
7220: /*@C
7221: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7223: Collective
7225: Input Parameter:
7226: . mat - the matrix
7228: Output Parameter:
7229: . matstruct - the sequential matrix with the nonzero structure of mat
7231: Level: developer
7233: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7234: @*/
7235: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7236: {
7237: PetscFunctionBegin;
7239: PetscAssertPointer(matstruct, 2);
7242: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7243: MatCheckPreallocated(mat, 1);
7245: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7246: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7247: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7248: PetscFunctionReturn(PETSC_SUCCESS);
7249: }
7251: /*@C
7252: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7254: Collective
7256: Input Parameter:
7257: . mat - the matrix (this is a pointer to the array of matrices, just to match the calling
7258: sequence of `MatGetSeqNonzeroStructure()`)
7260: Level: advanced
7262: Note:
7263: Frees not only the matrices, but also the array that contains the matrices
7265: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7266: @*/
7267: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7268: {
7269: PetscFunctionBegin;
7270: PetscAssertPointer(mat, 1);
7271: PetscCall(MatDestroy(mat));
7272: PetscFunctionReturn(PETSC_SUCCESS);
7273: }
7275: /*@
7276: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7277: replaces the index sets by larger ones that represent submatrices with
7278: additional overlap.
7280: Collective
7282: Input Parameters:
7283: + mat - the matrix
7284: . n - the number of index sets
7285: . is - the array of index sets (these index sets will changed during the call)
7286: - ov - the additional overlap requested
7288: Options Database Key:
7289: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7291: Level: developer
7293: Note:
7294: The computed overlap preserves the matrix block sizes when the blocks are square.
7295: That is: if a matrix nonzero for a given block would increase the overlap all columns associated with
7296: that block are included in the overlap regardless of whether each specific column would increase the overlap.
7298: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7299: @*/
7300: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7301: {
7302: PetscInt i, bs, cbs;
7304: PetscFunctionBegin;
7308: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7309: if (n) {
7310: PetscAssertPointer(is, 3);
7312: }
7313: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7314: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7315: MatCheckPreallocated(mat, 1);
7317: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7318: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7319: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7320: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7321: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7322: if (bs == cbs) {
7323: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7324: }
7325: PetscFunctionReturn(PETSC_SUCCESS);
7326: }
7328: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7330: /*@
7331: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7332: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7333: additional overlap.
7335: Collective
7337: Input Parameters:
7338: + mat - the matrix
7339: . n - the number of index sets
7340: . is - the array of index sets (these index sets will changed during the call)
7341: - ov - the additional overlap requested
7343: ` Options Database Key:
7344: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7346: Level: developer
7348: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7349: @*/
7350: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7351: {
7352: PetscInt i;
7354: PetscFunctionBegin;
7357: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7358: if (n) {
7359: PetscAssertPointer(is, 3);
7361: }
7362: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7363: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7364: MatCheckPreallocated(mat, 1);
7365: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7366: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7367: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7368: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7369: PetscFunctionReturn(PETSC_SUCCESS);
7370: }
7372: /*@
7373: MatGetBlockSize - Returns the matrix block size.
7375: Not Collective
7377: Input Parameter:
7378: . mat - the matrix
7380: Output Parameter:
7381: . bs - block size
7383: Level: intermediate
7385: Notes:
7386: Block row formats are `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7388: If the block size has not been set yet this routine returns 1.
7390: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7391: @*/
7392: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7393: {
7394: PetscFunctionBegin;
7396: PetscAssertPointer(bs, 2);
7397: *bs = PetscAbs(mat->rmap->bs);
7398: PetscFunctionReturn(PETSC_SUCCESS);
7399: }
7401: /*@
7402: MatGetBlockSizes - Returns the matrix block row and column sizes.
7404: Not Collective
7406: Input Parameter:
7407: . mat - the matrix
7409: Output Parameters:
7410: + rbs - row block size
7411: - cbs - column block size
7413: Level: intermediate
7415: Notes:
7416: Block row formats are `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7417: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7419: If a block size has not been set yet this routine returns 1.
7421: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7422: @*/
7423: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7424: {
7425: PetscFunctionBegin;
7427: if (rbs) PetscAssertPointer(rbs, 2);
7428: if (cbs) PetscAssertPointer(cbs, 3);
7429: if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7430: if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7431: PetscFunctionReturn(PETSC_SUCCESS);
7432: }
7434: /*@
7435: MatSetBlockSize - Sets the matrix block size.
7437: Logically Collective
7439: Input Parameters:
7440: + mat - the matrix
7441: - bs - block size
7443: Level: intermediate
7445: Notes:
7446: Block row formats are `MATBAIJ` and `MATSBAIJ` formats ALWAYS have square block storage in the matrix.
7447: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7449: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7450: is compatible with the matrix local sizes.
7452: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7453: @*/
7454: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7455: {
7456: PetscFunctionBegin;
7459: PetscCall(MatSetBlockSizes(mat, bs, bs));
7460: PetscFunctionReturn(PETSC_SUCCESS);
7461: }
7463: typedef struct {
7464: PetscInt n;
7465: IS *is;
7466: Mat *mat;
7467: PetscObjectState nonzerostate;
7468: Mat C;
7469: } EnvelopeData;
7471: static PetscErrorCode EnvelopeDataDestroy(EnvelopeData *edata)
7472: {
7473: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7474: PetscCall(PetscFree(edata->is));
7475: PetscCall(PetscFree(edata));
7476: return PETSC_SUCCESS;
7477: }
7479: /*@
7480: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal this computes and stores
7481: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7483: Collective
7485: Input Parameter:
7486: . mat - the matrix
7488: Level: intermediate
7490: Notes:
7491: There can be zeros within the blocks
7493: The blocks can overlap between processes, including laying on more than two processes
7495: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7496: @*/
7497: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7498: {
7499: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7500: PetscInt *diag, *odiag, sc;
7501: VecScatter scatter;
7502: PetscScalar *seqv;
7503: const PetscScalar *parv;
7504: const PetscInt *ia, *ja;
7505: PetscBool set, flag, done;
7506: Mat AA = mat, A;
7507: MPI_Comm comm;
7508: PetscMPIInt rank, size, tag;
7509: MPI_Status status;
7510: PetscContainer container;
7511: EnvelopeData *edata;
7512: Vec seq, par;
7513: IS isglobal;
7515: PetscFunctionBegin;
7517: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7518: if (!set || !flag) {
7519: /* TODO: only needs nonzero structure of transpose */
7520: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7521: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7522: }
7523: PetscCall(MatAIJGetLocalMat(AA, &A));
7524: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7525: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7527: PetscCall(MatGetLocalSize(mat, &n, NULL));
7528: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7529: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7530: PetscCallMPI(MPI_Comm_size(comm, &size));
7531: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7533: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7535: if (rank > 0) {
7536: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7537: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7538: }
7539: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7540: for (i = 0; i < n; i++) {
7541: env = PetscMax(env, ja[ia[i + 1] - 1]);
7542: II = rstart + i;
7543: if (env == II) {
7544: starts[lblocks] = tbs;
7545: sizes[lblocks++] = 1 + II - tbs;
7546: tbs = 1 + II;
7547: }
7548: }
7549: if (rank < size - 1) {
7550: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7551: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7552: }
7554: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7555: if (!set || !flag) PetscCall(MatDestroy(&AA));
7556: PetscCall(MatDestroy(&A));
7558: PetscCall(PetscNew(&edata));
7559: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7560: edata->n = lblocks;
7561: /* create IS needed for extracting blocks from the original matrix */
7562: PetscCall(PetscMalloc1(lblocks, &edata->is));
7563: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7565: /* Create the resulting inverse matrix structure with preallocation information */
7566: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7567: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7568: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7569: PetscCall(MatSetType(edata->C, MATAIJ));
7571: /* Communicate the start and end of each row, from each block to the correct rank */
7572: /* TODO: Use PetscSF instead of VecScatter */
7573: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7574: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7575: PetscCall(VecGetArrayWrite(seq, &seqv));
7576: for (PetscInt i = 0; i < lblocks; i++) {
7577: for (PetscInt j = 0; j < sizes[i]; j++) {
7578: seqv[cnt] = starts[i];
7579: seqv[cnt + 1] = starts[i] + sizes[i];
7580: cnt += 2;
7581: }
7582: }
7583: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7584: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7585: sc -= cnt;
7586: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7587: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7588: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7589: PetscCall(ISDestroy(&isglobal));
7590: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7591: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7592: PetscCall(VecScatterDestroy(&scatter));
7593: PetscCall(VecDestroy(&seq));
7594: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7595: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7596: PetscCall(VecGetArrayRead(par, &parv));
7597: cnt = 0;
7598: PetscCall(MatGetSize(mat, NULL, &n));
7599: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7600: PetscInt start, end, d = 0, od = 0;
7602: start = (PetscInt)PetscRealPart(parv[cnt]);
7603: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7604: cnt += 2;
7606: if (start < cstart) {
7607: od += cstart - start + n - cend;
7608: d += cend - cstart;
7609: } else if (start < cend) {
7610: od += n - cend;
7611: d += cend - start;
7612: } else od += n - start;
7613: if (end <= cstart) {
7614: od -= cstart - end + n - cend;
7615: d -= cend - cstart;
7616: } else if (end < cend) {
7617: od -= n - cend;
7618: d -= cend - end;
7619: } else od -= n - end;
7621: odiag[i] = od;
7622: diag[i] = d;
7623: }
7624: PetscCall(VecRestoreArrayRead(par, &parv));
7625: PetscCall(VecDestroy(&par));
7626: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7627: PetscCall(PetscFree2(diag, odiag));
7628: PetscCall(PetscFree2(sizes, starts));
7630: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7631: PetscCall(PetscContainerSetPointer(container, edata));
7632: PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode(*)(void *))EnvelopeDataDestroy));
7633: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7634: PetscCall(PetscObjectDereference((PetscObject)container));
7635: PetscFunctionReturn(PETSC_SUCCESS);
7636: }
7638: /*@
7639: MatInvertVariableBlockEnvelope - set matrix C to be the inverted block diagonal of matrix A
7641: Collective
7643: Input Parameters:
7644: + A - the matrix
7645: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7647: Output Parameter:
7648: . C - matrix with inverted block diagonal of `A`
7650: Level: advanced
7652: Note:
7653: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7655: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7656: @*/
7657: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7658: {
7659: PetscContainer container;
7660: EnvelopeData *edata;
7661: PetscObjectState nonzerostate;
7663: PetscFunctionBegin;
7664: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7665: if (!container) {
7666: PetscCall(MatComputeVariableBlockEnvelope(A));
7667: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7668: }
7669: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7670: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7671: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7672: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7674: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7675: *C = edata->C;
7677: for (PetscInt i = 0; i < edata->n; i++) {
7678: Mat D;
7679: PetscScalar *dvalues;
7681: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7682: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7683: PetscCall(MatSeqDenseInvert(D));
7684: PetscCall(MatDenseGetArray(D, &dvalues));
7685: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7686: PetscCall(MatDestroy(&D));
7687: }
7688: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7689: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7690: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7691: PetscFunctionReturn(PETSC_SUCCESS);
7692: }
7694: /*@
7695: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7697: Logically Collective
7699: Input Parameters:
7700: + mat - the matrix
7701: . nblocks - the number of blocks on this process, each block can only exist on a single process
7702: - bsizes - the block sizes
7704: Level: intermediate
7706: Notes:
7707: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7709: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
7711: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7712: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7713: @*/
7714: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, PetscInt *bsizes)
7715: {
7716: PetscInt i, ncnt = 0, nlocal;
7718: PetscFunctionBegin;
7720: PetscCheck(nblocks >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks must be great than or equal to zero");
7721: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7722: for (i = 0; i < nblocks; i++) ncnt += bsizes[i];
7723: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7724: PetscCall(PetscFree(mat->bsizes));
7725: mat->nblocks = nblocks;
7726: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7727: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7728: PetscFunctionReturn(PETSC_SUCCESS);
7729: }
7731: /*@C
7732: MatGetVariableBlockSizes - Gets a diagonal blocks of the matrix that need not be of the same size
7734: Logically Collective; No Fortran Support
7736: Input Parameter:
7737: . mat - the matrix
7739: Output Parameters:
7740: + nblocks - the number of blocks on this process
7741: - bsizes - the block sizes
7743: Level: intermediate
7745: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7746: @*/
7747: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt **bsizes)
7748: {
7749: PetscFunctionBegin;
7751: *nblocks = mat->nblocks;
7752: *bsizes = mat->bsizes;
7753: PetscFunctionReturn(PETSC_SUCCESS);
7754: }
7756: /*@
7757: MatSetBlockSizes - Sets the matrix block row and column sizes.
7759: Logically Collective
7761: Input Parameters:
7762: + mat - the matrix
7763: . rbs - row block size
7764: - cbs - column block size
7766: Level: intermediate
7768: Notes:
7769: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7770: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7771: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7773: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7774: are compatible with the matrix local sizes.
7776: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
7778: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7779: @*/
7780: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7781: {
7782: PetscFunctionBegin;
7786: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7787: if (mat->rmap->refcnt) {
7788: ISLocalToGlobalMapping l2g = NULL;
7789: PetscLayout nmap = NULL;
7791: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7792: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7793: PetscCall(PetscLayoutDestroy(&mat->rmap));
7794: mat->rmap = nmap;
7795: mat->rmap->mapping = l2g;
7796: }
7797: if (mat->cmap->refcnt) {
7798: ISLocalToGlobalMapping l2g = NULL;
7799: PetscLayout nmap = NULL;
7801: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7802: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7803: PetscCall(PetscLayoutDestroy(&mat->cmap));
7804: mat->cmap = nmap;
7805: mat->cmap->mapping = l2g;
7806: }
7807: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7808: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7809: PetscFunctionReturn(PETSC_SUCCESS);
7810: }
7812: /*@
7813: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7815: Logically Collective
7817: Input Parameters:
7818: + mat - the matrix
7819: . fromRow - matrix from which to copy row block size
7820: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7822: Level: developer
7824: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7825: @*/
7826: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7827: {
7828: PetscFunctionBegin;
7832: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7833: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7834: PetscFunctionReturn(PETSC_SUCCESS);
7835: }
7837: /*@
7838: MatResidual - Default routine to calculate the residual r = b - Ax
7840: Collective
7842: Input Parameters:
7843: + mat - the matrix
7844: . b - the right-hand-side
7845: - x - the approximate solution
7847: Output Parameter:
7848: . r - location to store the residual
7850: Level: developer
7852: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
7853: @*/
7854: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
7855: {
7856: PetscFunctionBegin;
7862: MatCheckPreallocated(mat, 1);
7863: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
7864: if (!mat->ops->residual) {
7865: PetscCall(MatMult(mat, x, r));
7866: PetscCall(VecAYPX(r, -1.0, b));
7867: } else {
7868: PetscUseTypeMethod(mat, residual, b, x, r);
7869: }
7870: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
7871: PetscFunctionReturn(PETSC_SUCCESS);
7872: }
7874: /*MC
7875: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
7877: Synopsis:
7878: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7880: Not Collective
7882: Input Parameters:
7883: + A - the matrix
7884: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7885: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7886: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7887: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7888: always used.
7890: Output Parameters:
7891: + n - number of local rows in the (possibly compressed) matrix
7892: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7893: . ja - the column indices
7894: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7895: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7897: Level: developer
7899: Note:
7900: Use `MatRestoreRowIJF90()` when you no longer need access to the data
7902: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
7903: M*/
7905: /*MC
7906: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
7908: Synopsis:
7909: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7911: Not Collective
7913: Input Parameters:
7914: + A - the matrix
7915: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7916: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7917: inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7918: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7919: always used.
7920: . n - number of local rows in the (possibly compressed) matrix
7921: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7922: . ja - the column indices
7923: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7924: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7926: Level: developer
7928: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
7929: M*/
7931: /*@C
7932: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
7934: Collective
7936: Input Parameters:
7937: + mat - the matrix
7938: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7939: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7940: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7941: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7942: always used.
7944: Output Parameters:
7945: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
7946: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
7947: . ja - the column indices, use `NULL` if not needed
7948: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7949: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7951: Level: developer
7953: Notes:
7954: You CANNOT change any of the ia[] or ja[] values.
7956: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
7958: Fortran Notes:
7959: Use
7960: .vb
7961: PetscInt, pointer :: ia(:),ja(:)
7962: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
7963: ! Access the ith and jth entries via ia(i) and ja(j)
7964: .ve
7965: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
7967: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
7968: @*/
7969: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
7970: {
7971: PetscFunctionBegin;
7974: if (n) PetscAssertPointer(n, 5);
7975: if (ia) PetscAssertPointer(ia, 6);
7976: if (ja) PetscAssertPointer(ja, 7);
7977: if (done) PetscAssertPointer(done, 8);
7978: MatCheckPreallocated(mat, 1);
7979: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
7980: else {
7981: if (done) *done = PETSC_TRUE;
7982: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
7983: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
7984: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
7985: }
7986: PetscFunctionReturn(PETSC_SUCCESS);
7987: }
7989: /*@C
7990: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
7992: Collective
7994: Input Parameters:
7995: + mat - the matrix
7996: . shift - 1 or zero indicating we want the indices starting at 0 or 1
7997: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
7998: symmetrized
7999: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8000: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8001: always used.
8002: . n - number of columns in the (possibly compressed) matrix
8003: . ia - the column pointers; that is ia[0] = 0, ia[col] = i[col-1] + number of elements in that col of the matrix
8004: - ja - the row indices
8006: Output Parameter:
8007: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8009: Level: developer
8011: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8012: @*/
8013: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8014: {
8015: PetscFunctionBegin;
8018: PetscAssertPointer(n, 5);
8019: if (ia) PetscAssertPointer(ia, 6);
8020: if (ja) PetscAssertPointer(ja, 7);
8021: PetscAssertPointer(done, 8);
8022: MatCheckPreallocated(mat, 1);
8023: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8024: else {
8025: *done = PETSC_TRUE;
8026: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8027: }
8028: PetscFunctionReturn(PETSC_SUCCESS);
8029: }
8031: /*@C
8032: MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with `MatGetRowIJ()`.
8034: Collective
8036: Input Parameters:
8037: + mat - the matrix
8038: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8039: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8040: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8041: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8042: always used.
8043: . n - size of (possibly compressed) matrix
8044: . ia - the row pointers
8045: - ja - the column indices
8047: Output Parameter:
8048: . done - `PETSC_TRUE` or `PETSC_FALSE` indicated that the values have been returned
8050: Level: developer
8052: Note:
8053: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8054: us of the array after it has been restored. If you pass `NULL`, it will
8055: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8057: Fortran Notes:
8058: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8060: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8061: @*/
8062: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8063: {
8064: PetscFunctionBegin;
8067: if (ia) PetscAssertPointer(ia, 6);
8068: if (ja) PetscAssertPointer(ja, 7);
8069: if (done) PetscAssertPointer(done, 8);
8070: MatCheckPreallocated(mat, 1);
8072: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8073: else {
8074: if (done) *done = PETSC_TRUE;
8075: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8076: if (n) *n = 0;
8077: if (ia) *ia = NULL;
8078: if (ja) *ja = NULL;
8079: }
8080: PetscFunctionReturn(PETSC_SUCCESS);
8081: }
8083: /*@C
8084: MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with `MatGetColumnIJ()`.
8086: Collective
8088: Input Parameters:
8089: + mat - the matrix
8090: . shift - 1 or zero indicating we want the indices starting at 0 or 1
8091: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8092: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8093: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8094: always used.
8096: Output Parameters:
8097: + n - size of (possibly compressed) matrix
8098: . ia - the column pointers
8099: . ja - the row indices
8100: - done - `PETSC_TRUE` or `PETSC_FALSE` indicated that the values have been returned
8102: Level: developer
8104: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8105: @*/
8106: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8107: {
8108: PetscFunctionBegin;
8111: if (ia) PetscAssertPointer(ia, 6);
8112: if (ja) PetscAssertPointer(ja, 7);
8113: PetscAssertPointer(done, 8);
8114: MatCheckPreallocated(mat, 1);
8116: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8117: else {
8118: *done = PETSC_TRUE;
8119: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8120: if (n) *n = 0;
8121: if (ia) *ia = NULL;
8122: if (ja) *ja = NULL;
8123: }
8124: PetscFunctionReturn(PETSC_SUCCESS);
8125: }
8127: /*@C
8128: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8129: `MatGetColumnIJ()`.
8131: Collective
8133: Input Parameters:
8134: + mat - the matrix
8135: . ncolors - maximum color value
8136: . n - number of entries in colorarray
8137: - colorarray - array indicating color for each column
8139: Output Parameter:
8140: . iscoloring - coloring generated using colorarray information
8142: Level: developer
8144: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8145: @*/
8146: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8147: {
8148: PetscFunctionBegin;
8151: PetscAssertPointer(colorarray, 4);
8152: PetscAssertPointer(iscoloring, 5);
8153: MatCheckPreallocated(mat, 1);
8155: if (!mat->ops->coloringpatch) {
8156: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8157: } else {
8158: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8159: }
8160: PetscFunctionReturn(PETSC_SUCCESS);
8161: }
8163: /*@
8164: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8166: Logically Collective
8168: Input Parameter:
8169: . mat - the factored matrix to be reset
8171: Level: developer
8173: Notes:
8174: This routine should be used only with factored matrices formed by in-place
8175: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8176: format). This option can save memory, for example, when solving nonlinear
8177: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8178: ILU(0) preconditioner.
8180: One can specify in-place ILU(0) factorization by calling
8181: .vb
8182: PCType(pc,PCILU);
8183: PCFactorSeUseInPlace(pc);
8184: .ve
8185: or by using the options -pc_type ilu -pc_factor_in_place
8187: In-place factorization ILU(0) can also be used as a local
8188: solver for the blocks within the block Jacobi or additive Schwarz
8189: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8190: for details on setting local solver options.
8192: Most users should employ the `KSP` interface for linear solvers
8193: instead of working directly with matrix algebra routines such as this.
8194: See, e.g., `KSPCreate()`.
8196: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8197: @*/
8198: PetscErrorCode MatSetUnfactored(Mat mat)
8199: {
8200: PetscFunctionBegin;
8203: MatCheckPreallocated(mat, 1);
8204: mat->factortype = MAT_FACTOR_NONE;
8205: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8206: PetscUseTypeMethod(mat, setunfactored);
8207: PetscFunctionReturn(PETSC_SUCCESS);
8208: }
8210: /*MC
8211: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8213: Synopsis:
8214: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8216: Not Collective
8218: Input Parameter:
8219: . x - matrix
8221: Output Parameters:
8222: + xx_v - the Fortran pointer to the array
8223: - ierr - error code
8225: Example of Usage:
8226: .vb
8227: PetscScalar, pointer xx_v(:,:)
8228: ....
8229: call MatDenseGetArrayF90(x,xx_v,ierr)
8230: a = xx_v(3)
8231: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8232: .ve
8234: Level: advanced
8236: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8237: M*/
8239: /*MC
8240: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8241: accessed with `MatDenseGetArrayF90()`.
8243: Synopsis:
8244: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8246: Not Collective
8248: Input Parameters:
8249: + x - matrix
8250: - xx_v - the Fortran90 pointer to the array
8252: Output Parameter:
8253: . ierr - error code
8255: Example of Usage:
8256: .vb
8257: PetscScalar, pointer xx_v(:,:)
8258: ....
8259: call MatDenseGetArrayF90(x,xx_v,ierr)
8260: a = xx_v(3)
8261: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8262: .ve
8264: Level: advanced
8266: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8267: M*/
8269: /*MC
8270: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8272: Synopsis:
8273: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8275: Not Collective
8277: Input Parameter:
8278: . x - matrix
8280: Output Parameters:
8281: + xx_v - the Fortran pointer to the array
8282: - ierr - error code
8284: Example of Usage:
8285: .vb
8286: PetscScalar, pointer xx_v(:)
8287: ....
8288: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8289: a = xx_v(3)
8290: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8291: .ve
8293: Level: advanced
8295: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8296: M*/
8298: /*MC
8299: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8300: accessed with `MatSeqAIJGetArrayF90()`.
8302: Synopsis:
8303: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8305: Not Collective
8307: Input Parameters:
8308: + x - matrix
8309: - xx_v - the Fortran90 pointer to the array
8311: Output Parameter:
8312: . ierr - error code
8314: Example of Usage:
8315: .vb
8316: PetscScalar, pointer xx_v(:)
8317: ....
8318: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8319: a = xx_v(3)
8320: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8321: .ve
8323: Level: advanced
8325: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8326: M*/
8328: /*@
8329: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8330: as the original matrix.
8332: Collective
8334: Input Parameters:
8335: + mat - the original matrix
8336: . isrow - parallel `IS` containing the rows this processor should obtain
8337: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in IT's "diagonal part" in the new matrix.
8338: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8340: Output Parameter:
8341: . newmat - the new submatrix, of the same type as the original matrix
8343: Level: advanced
8345: Notes:
8346: The submatrix will be able to be multiplied with vectors using the same layout as `iscol`.
8348: Some matrix types place restrictions on the row and column indices, such
8349: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8350: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8352: The index sets may not have duplicate entries.
8354: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8355: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8356: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8357: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8358: you are finished using it.
8360: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8361: the input matrix.
8363: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8365: Example usage:
8366: Consider the following 8x8 matrix with 34 non-zero values, that is
8367: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8368: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8369: as follows
8370: .vb
8371: 1 2 0 | 0 3 0 | 0 4
8372: Proc0 0 5 6 | 7 0 0 | 8 0
8373: 9 0 10 | 11 0 0 | 12 0
8374: -------------------------------------
8375: 13 0 14 | 15 16 17 | 0 0
8376: Proc1 0 18 0 | 19 20 21 | 0 0
8377: 0 0 0 | 22 23 0 | 24 0
8378: -------------------------------------
8379: Proc2 25 26 27 | 0 0 28 | 29 0
8380: 30 0 0 | 31 32 33 | 0 34
8381: .ve
8383: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8385: .vb
8386: 2 0 | 0 3 0 | 0
8387: Proc0 5 6 | 7 0 0 | 8
8388: -------------------------------
8389: Proc1 18 0 | 19 20 21 | 0
8390: -------------------------------
8391: Proc2 26 27 | 0 0 28 | 29
8392: 0 0 | 31 32 33 | 0
8393: .ve
8395: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8396: @*/
8397: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8398: {
8399: PetscMPIInt size;
8400: Mat *local;
8401: IS iscoltmp;
8402: PetscBool flg;
8404: PetscFunctionBegin;
8408: PetscAssertPointer(newmat, 5);
8411: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8412: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8414: MatCheckPreallocated(mat, 1);
8415: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8417: if (!iscol || isrow == iscol) {
8418: PetscBool stride;
8419: PetscMPIInt grabentirematrix = 0, grab;
8420: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8421: if (stride) {
8422: PetscInt first, step, n, rstart, rend;
8423: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8424: if (step == 1) {
8425: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8426: if (rstart == first) {
8427: PetscCall(ISGetLocalSize(isrow, &n));
8428: if (n == rend - rstart) grabentirematrix = 1;
8429: }
8430: }
8431: }
8432: PetscCall(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8433: if (grab) {
8434: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8435: if (cll == MAT_INITIAL_MATRIX) {
8436: *newmat = mat;
8437: PetscCall(PetscObjectReference((PetscObject)mat));
8438: }
8439: PetscFunctionReturn(PETSC_SUCCESS);
8440: }
8441: }
8443: if (!iscol) {
8444: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8445: } else {
8446: iscoltmp = iscol;
8447: }
8449: /* if original matrix is on just one processor then use submatrix generated */
8450: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8451: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8452: goto setproperties;
8453: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8454: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8455: *newmat = *local;
8456: PetscCall(PetscFree(local));
8457: goto setproperties;
8458: } else if (!mat->ops->createsubmatrix) {
8459: /* Create a new matrix type that implements the operation using the full matrix */
8460: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8461: switch (cll) {
8462: case MAT_INITIAL_MATRIX:
8463: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8464: break;
8465: case MAT_REUSE_MATRIX:
8466: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8467: break;
8468: default:
8469: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8470: }
8471: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8472: goto setproperties;
8473: }
8475: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8476: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8477: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8479: setproperties:
8480: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8481: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8482: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8483: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8484: PetscFunctionReturn(PETSC_SUCCESS);
8485: }
8487: /*@
8488: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8490: Not Collective
8492: Input Parameters:
8493: + A - the matrix we wish to propagate options from
8494: - B - the matrix we wish to propagate options to
8496: Level: beginner
8498: Note:
8499: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8501: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8502: @*/
8503: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8504: {
8505: PetscFunctionBegin;
8508: B->symmetry_eternal = A->symmetry_eternal;
8509: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8510: B->symmetric = A->symmetric;
8511: B->structurally_symmetric = A->structurally_symmetric;
8512: B->spd = A->spd;
8513: B->hermitian = A->hermitian;
8514: PetscFunctionReturn(PETSC_SUCCESS);
8515: }
8517: /*@
8518: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8519: used during the assembly process to store values that belong to
8520: other processors.
8522: Not Collective
8524: Input Parameters:
8525: + mat - the matrix
8526: . size - the initial size of the stash.
8527: - bsize - the initial size of the block-stash(if used).
8529: Options Database Keys:
8530: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8531: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8533: Level: intermediate
8535: Notes:
8536: The block-stash is used for values set with `MatSetValuesBlocked()` while
8537: the stash is used for values set with `MatSetValues()`
8539: Run with the option -info and look for output of the form
8540: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8541: to determine the appropriate value, MM, to use for size and
8542: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8543: to determine the value, BMM to use for bsize
8545: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8546: @*/
8547: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8548: {
8549: PetscFunctionBegin;
8552: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8553: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8554: PetscFunctionReturn(PETSC_SUCCESS);
8555: }
8557: /*@
8558: MatInterpolateAdd - w = y + A*x or A'*x depending on the shape of
8559: the matrix
8561: Neighbor-wise Collective
8563: Input Parameters:
8564: + A - the matrix
8565: . x - the vector to be multiplied by the interpolation operator
8566: - y - the vector to be added to the result
8568: Output Parameter:
8569: . w - the resulting vector
8571: Level: intermediate
8573: Notes:
8574: `w` may be the same vector as `y`.
8576: This allows one to use either the restriction or interpolation (its transpose)
8577: matrix to do the interpolation
8579: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8580: @*/
8581: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8582: {
8583: PetscInt M, N, Ny;
8585: PetscFunctionBegin;
8590: PetscCall(MatGetSize(A, &M, &N));
8591: PetscCall(VecGetSize(y, &Ny));
8592: if (M == Ny) {
8593: PetscCall(MatMultAdd(A, x, y, w));
8594: } else {
8595: PetscCall(MatMultTransposeAdd(A, x, y, w));
8596: }
8597: PetscFunctionReturn(PETSC_SUCCESS);
8598: }
8600: /*@
8601: MatInterpolate - y = A*x or A'*x depending on the shape of
8602: the matrix
8604: Neighbor-wise Collective
8606: Input Parameters:
8607: + A - the matrix
8608: - x - the vector to be interpolated
8610: Output Parameter:
8611: . y - the resulting vector
8613: Level: intermediate
8615: Note:
8616: This allows one to use either the restriction or interpolation (its transpose)
8617: matrix to do the interpolation
8619: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8620: @*/
8621: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8622: {
8623: PetscInt M, N, Ny;
8625: PetscFunctionBegin;
8629: PetscCall(MatGetSize(A, &M, &N));
8630: PetscCall(VecGetSize(y, &Ny));
8631: if (M == Ny) {
8632: PetscCall(MatMult(A, x, y));
8633: } else {
8634: PetscCall(MatMultTranspose(A, x, y));
8635: }
8636: PetscFunctionReturn(PETSC_SUCCESS);
8637: }
8639: /*@
8640: MatRestrict - y = A*x or A'*x
8642: Neighbor-wise Collective
8644: Input Parameters:
8645: + A - the matrix
8646: - x - the vector to be restricted
8648: Output Parameter:
8649: . y - the resulting vector
8651: Level: intermediate
8653: Note:
8654: This allows one to use either the restriction or interpolation (its transpose)
8655: matrix to do the restriction
8657: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8658: @*/
8659: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8660: {
8661: PetscInt M, N, Ny;
8663: PetscFunctionBegin;
8667: PetscCall(MatGetSize(A, &M, &N));
8668: PetscCall(VecGetSize(y, &Ny));
8669: if (M == Ny) {
8670: PetscCall(MatMult(A, x, y));
8671: } else {
8672: PetscCall(MatMultTranspose(A, x, y));
8673: }
8674: PetscFunctionReturn(PETSC_SUCCESS);
8675: }
8677: /*@
8678: MatMatInterpolateAdd - Y = W + A*X or W + A'*X
8680: Neighbor-wise Collective
8682: Input Parameters:
8683: + A - the matrix
8684: . x - the input dense matrix to be multiplied
8685: - w - the input dense matrix to be added to the result
8687: Output Parameter:
8688: . y - the output dense matrix
8690: Level: intermediate
8692: Note:
8693: This allows one to use either the restriction or interpolation (its transpose)
8694: matrix to do the interpolation. y matrix can be reused if already created with the proper sizes,
8695: otherwise it will be recreated. y must be initialized to `NULL` if not supplied.
8697: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8698: @*/
8699: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8700: {
8701: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8702: PetscBool trans = PETSC_TRUE;
8703: MatReuse reuse = MAT_INITIAL_MATRIX;
8705: PetscFunctionBegin;
8711: PetscCall(MatGetSize(A, &M, &N));
8712: PetscCall(MatGetSize(x, &Mx, &Nx));
8713: if (N == Mx) trans = PETSC_FALSE;
8714: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8715: Mo = trans ? N : M;
8716: if (*y) {
8717: PetscCall(MatGetSize(*y, &My, &Ny));
8718: if (Mo == My && Nx == Ny) {
8719: reuse = MAT_REUSE_MATRIX;
8720: } else {
8721: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8722: PetscCall(MatDestroy(y));
8723: }
8724: }
8726: if (w && *y == w) { /* this is to minimize changes in PCMG */
8727: PetscBool flg;
8729: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8730: if (w) {
8731: PetscInt My, Ny, Mw, Nw;
8733: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8734: PetscCall(MatGetSize(*y, &My, &Ny));
8735: PetscCall(MatGetSize(w, &Mw, &Nw));
8736: if (!flg || My != Mw || Ny != Nw) w = NULL;
8737: }
8738: if (!w) {
8739: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8740: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8741: PetscCall(PetscObjectDereference((PetscObject)w));
8742: } else {
8743: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8744: }
8745: }
8746: if (!trans) {
8747: PetscCall(MatMatMult(A, x, reuse, PETSC_DEFAULT, y));
8748: } else {
8749: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DEFAULT, y));
8750: }
8751: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8752: PetscFunctionReturn(PETSC_SUCCESS);
8753: }
8755: /*@
8756: MatMatInterpolate - Y = A*X or A'*X
8758: Neighbor-wise Collective
8760: Input Parameters:
8761: + A - the matrix
8762: - x - the input dense matrix
8764: Output Parameter:
8765: . y - the output dense matrix
8767: Level: intermediate
8769: Note:
8770: This allows one to use either the restriction or interpolation (its transpose)
8771: matrix to do the interpolation. y matrix can be reused if already created with the proper sizes,
8772: otherwise it will be recreated. y must be initialized to `NULL` if not supplied.
8774: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8775: @*/
8776: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8777: {
8778: PetscFunctionBegin;
8779: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8780: PetscFunctionReturn(PETSC_SUCCESS);
8781: }
8783: /*@
8784: MatMatRestrict - Y = A*X or A'*X
8786: Neighbor-wise Collective
8788: Input Parameters:
8789: + A - the matrix
8790: - x - the input dense matrix
8792: Output Parameter:
8793: . y - the output dense matrix
8795: Level: intermediate
8797: Note:
8798: This allows one to use either the restriction or interpolation (its transpose)
8799: matrix to do the restriction. y matrix can be reused if already created with the proper sizes,
8800: otherwise it will be recreated. y must be initialized to `NULL` if not supplied.
8802: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8803: @*/
8804: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8805: {
8806: PetscFunctionBegin;
8807: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8808: PetscFunctionReturn(PETSC_SUCCESS);
8809: }
8811: /*@
8812: MatGetNullSpace - retrieves the null space of a matrix.
8814: Logically Collective
8816: Input Parameters:
8817: + mat - the matrix
8818: - nullsp - the null space object
8820: Level: developer
8822: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8823: @*/
8824: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8825: {
8826: PetscFunctionBegin;
8828: PetscAssertPointer(nullsp, 2);
8829: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8830: PetscFunctionReturn(PETSC_SUCCESS);
8831: }
8833: /*@
8834: MatSetNullSpace - attaches a null space to a matrix.
8836: Logically Collective
8838: Input Parameters:
8839: + mat - the matrix
8840: - nullsp - the null space object
8842: Level: advanced
8844: Notes:
8845: This null space is used by the `KSP` linear solvers to solve singular systems.
8847: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with an nullsp of `NULL`
8849: For inconsistent singular systems (linear systems where the right hand side is not in the range of the operator) the `KSP` residuals will not converge to
8850: to zero but the linear system will still be solved in a least squares sense.
8852: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8853: the domain of a matrix A (from R^n to R^m (m rows, n columns) R^n = the direct sum of the null space of A, n(A), + the range of A^T, R(A^T).
8854: Similarly R^m = direct sum n(A^T) + R(A). Hence the linear system A x = b has a solution only if b in R(A) (or correspondingly b is orthogonal to
8855: n(A^T)) and if x is a solution then x + alpha n(A) is a solution for any alpha. The minimum norm solution is orthogonal to n(A). For problems without a solution
8856: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving A x = \hat{b} where \hat{b} is b orthogonalized to the n(A^T).
8857: This \hat{b} can be obtained by calling MatNullSpaceRemove() with the null space of the transpose of the matrix.
8859: If the matrix is known to be symmetric because it is an `MATSBAIJ` matrix or one as called
8860: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`); this
8861: routine also automatically calls `MatSetTransposeNullSpace()`.
8863: The user should call `MatNullSpaceDestroy()`.
8865: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
8866: `KSPSetPCSide()`
8867: @*/
8868: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
8869: {
8870: PetscFunctionBegin;
8873: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8874: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
8875: mat->nullsp = nullsp;
8876: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
8877: PetscFunctionReturn(PETSC_SUCCESS);
8878: }
8880: /*@
8881: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
8883: Logically Collective
8885: Input Parameters:
8886: + mat - the matrix
8887: - nullsp - the null space object
8889: Level: developer
8891: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
8892: @*/
8893: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
8894: {
8895: PetscFunctionBegin;
8898: PetscAssertPointer(nullsp, 2);
8899: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
8900: PetscFunctionReturn(PETSC_SUCCESS);
8901: }
8903: /*@
8904: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
8906: Logically Collective
8908: Input Parameters:
8909: + mat - the matrix
8910: - nullsp - the null space object
8912: Level: advanced
8914: Notes:
8915: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
8917: See `MatSetNullSpace()`
8919: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
8920: @*/
8921: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
8922: {
8923: PetscFunctionBegin;
8926: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8927: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
8928: mat->transnullsp = nullsp;
8929: PetscFunctionReturn(PETSC_SUCCESS);
8930: }
8932: /*@
8933: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions
8934: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
8936: Logically Collective
8938: Input Parameters:
8939: + mat - the matrix
8940: - nullsp - the null space object
8942: Level: advanced
8944: Notes:
8945: Overwrites any previous near null space that may have been attached
8947: You can remove the null space by calling this routine with an nullsp of `NULL`
8949: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
8950: @*/
8951: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
8952: {
8953: PetscFunctionBegin;
8957: MatCheckPreallocated(mat, 1);
8958: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8959: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
8960: mat->nearnullsp = nullsp;
8961: PetscFunctionReturn(PETSC_SUCCESS);
8962: }
8964: /*@
8965: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
8967: Not Collective
8969: Input Parameter:
8970: . mat - the matrix
8972: Output Parameter:
8973: . nullsp - the null space object, `NULL` if not set
8975: Level: advanced
8977: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
8978: @*/
8979: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
8980: {
8981: PetscFunctionBegin;
8984: PetscAssertPointer(nullsp, 2);
8985: MatCheckPreallocated(mat, 1);
8986: *nullsp = mat->nearnullsp;
8987: PetscFunctionReturn(PETSC_SUCCESS);
8988: }
8990: /*@C
8991: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
8993: Collective
8995: Input Parameters:
8996: + mat - the matrix
8997: . row - row/column permutation
8998: - info - information on desired factorization process
9000: Level: developer
9002: Notes:
9003: Probably really in-place only when level of fill is zero, otherwise allocates
9004: new space to store factored matrix and deletes previous memory.
9006: Most users should employ the `KSP` interface for linear solvers
9007: instead of working directly with matrix algebra routines such as this.
9008: See, e.g., `KSPCreate()`.
9010: Developer Notes:
9011: The Fortran interface is not autogenerated as the
9012: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9014: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9015: @*/
9016: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9017: {
9018: PetscFunctionBegin;
9022: PetscAssertPointer(info, 3);
9023: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9024: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9025: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9026: MatCheckPreallocated(mat, 1);
9027: PetscUseTypeMethod(mat, iccfactor, row, info);
9028: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9029: PetscFunctionReturn(PETSC_SUCCESS);
9030: }
9032: /*@
9033: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9034: ghosted ones.
9036: Not Collective
9038: Input Parameters:
9039: + mat - the matrix
9040: - diag - the diagonal values, including ghost ones
9042: Level: developer
9044: Notes:
9045: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9047: This allows one to avoid during communication to perform the scaling that must be done with `MatDiagonalScale()`
9049: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9050: @*/
9051: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9052: {
9053: PetscMPIInt size;
9055: PetscFunctionBegin;
9060: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9061: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9062: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9063: if (size == 1) {
9064: PetscInt n, m;
9065: PetscCall(VecGetSize(diag, &n));
9066: PetscCall(MatGetSize(mat, NULL, &m));
9067: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9068: PetscCall(MatDiagonalScale(mat, NULL, diag));
9069: } else {
9070: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9071: }
9072: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9073: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9074: PetscFunctionReturn(PETSC_SUCCESS);
9075: }
9077: /*@
9078: MatGetInertia - Gets the inertia from a factored matrix
9080: Collective
9082: Input Parameter:
9083: . mat - the matrix
9085: Output Parameters:
9086: + nneg - number of negative eigenvalues
9087: . nzero - number of zero eigenvalues
9088: - npos - number of positive eigenvalues
9090: Level: advanced
9092: Note:
9093: Matrix must have been factored by `MatCholeskyFactor()`
9095: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9096: @*/
9097: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9098: {
9099: PetscFunctionBegin;
9102: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9103: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9104: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9105: PetscFunctionReturn(PETSC_SUCCESS);
9106: }
9108: /*@C
9109: MatSolves - Solves A x = b, given a factored matrix, for a collection of vectors
9111: Neighbor-wise Collective
9113: Input Parameters:
9114: + mat - the factored matrix obtained with `MatGetFactor()`
9115: - b - the right-hand-side vectors
9117: Output Parameter:
9118: . x - the result vectors
9120: Level: developer
9122: Note:
9123: The vectors `b` and `x` cannot be the same. I.e., one cannot
9124: call `MatSolves`(A,x,x).
9126: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9127: @*/
9128: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9129: {
9130: PetscFunctionBegin;
9133: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9134: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9135: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9137: MatCheckPreallocated(mat, 1);
9138: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9139: PetscUseTypeMethod(mat, solves, b, x);
9140: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9141: PetscFunctionReturn(PETSC_SUCCESS);
9142: }
9144: /*@
9145: MatIsSymmetric - Test whether a matrix is symmetric
9147: Collective
9149: Input Parameters:
9150: + A - the matrix to test
9151: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9153: Output Parameter:
9154: . flg - the result
9156: Level: intermediate
9158: Notes:
9159: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9161: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9163: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9164: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9166: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9167: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9168: @*/
9169: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9170: {
9171: PetscFunctionBegin;
9173: PetscAssertPointer(flg, 3);
9175: if (A->symmetric == PETSC_BOOL3_TRUE) *flg = PETSC_TRUE;
9176: else if (A->symmetric == PETSC_BOOL3_FALSE) *flg = PETSC_FALSE;
9177: else {
9178: PetscUseTypeMethod(A, issymmetric, tol, flg);
9179: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9180: }
9181: PetscFunctionReturn(PETSC_SUCCESS);
9182: }
9184: /*@
9185: MatIsHermitian - Test whether a matrix is Hermitian
9187: Collective
9189: Input Parameters:
9190: + A - the matrix to test
9191: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9193: Output Parameter:
9194: . flg - the result
9196: Level: intermediate
9198: Notes:
9199: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9201: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9203: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9204: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMEMTRY_ETERNAL`,`PETSC_TRUE`)
9206: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9207: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9208: @*/
9209: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9210: {
9211: PetscFunctionBegin;
9213: PetscAssertPointer(flg, 3);
9215: if (A->hermitian == PETSC_BOOL3_TRUE) *flg = PETSC_TRUE;
9216: else if (A->hermitian == PETSC_BOOL3_FALSE) *flg = PETSC_FALSE;
9217: else {
9218: PetscUseTypeMethod(A, ishermitian, tol, flg);
9219: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9220: }
9221: PetscFunctionReturn(PETSC_SUCCESS);
9222: }
9224: /*@
9225: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9227: Not Collective
9229: Input Parameter:
9230: . A - the matrix to check
9232: Output Parameters:
9233: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9234: - flg - the result (only valid if set is `PETSC_TRUE`)
9236: Level: advanced
9238: Notes:
9239: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9240: if you want it explicitly checked
9242: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9243: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9245: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9246: @*/
9247: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9248: {
9249: PetscFunctionBegin;
9251: PetscAssertPointer(set, 2);
9252: PetscAssertPointer(flg, 3);
9253: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9254: *set = PETSC_TRUE;
9255: *flg = PetscBool3ToBool(A->symmetric);
9256: } else {
9257: *set = PETSC_FALSE;
9258: }
9259: PetscFunctionReturn(PETSC_SUCCESS);
9260: }
9262: /*@
9263: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9265: Not Collective
9267: Input Parameter:
9268: . A - the matrix to check
9270: Output Parameters:
9271: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9272: - flg - the result (only valid if set is `PETSC_TRUE`)
9274: Level: advanced
9276: Notes:
9277: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9279: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9280: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9282: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9283: @*/
9284: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9285: {
9286: PetscFunctionBegin;
9288: PetscAssertPointer(set, 2);
9289: PetscAssertPointer(flg, 3);
9290: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9291: *set = PETSC_TRUE;
9292: *flg = PetscBool3ToBool(A->spd);
9293: } else {
9294: *set = PETSC_FALSE;
9295: }
9296: PetscFunctionReturn(PETSC_SUCCESS);
9297: }
9299: /*@
9300: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9302: Not Collective
9304: Input Parameter:
9305: . A - the matrix to check
9307: Output Parameters:
9308: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9309: - flg - the result (only valid if set is `PETSC_TRUE`)
9311: Level: advanced
9313: Notes:
9314: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9315: if you want it explicitly checked
9317: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9318: after changes to the matrices values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9320: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9321: @*/
9322: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9323: {
9324: PetscFunctionBegin;
9326: PetscAssertPointer(set, 2);
9327: PetscAssertPointer(flg, 3);
9328: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9329: *set = PETSC_TRUE;
9330: *flg = PetscBool3ToBool(A->hermitian);
9331: } else {
9332: *set = PETSC_FALSE;
9333: }
9334: PetscFunctionReturn(PETSC_SUCCESS);
9335: }
9337: /*@
9338: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9340: Collective
9342: Input Parameter:
9343: . A - the matrix to test
9345: Output Parameter:
9346: . flg - the result
9348: Level: intermediate
9350: Notes:
9351: If the matrix does yet know it is structurally symmetric this can be an expensive operation, also available `MatIsStructurallySymmetricKnown()`
9353: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9354: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9356: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9357: @*/
9358: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9359: {
9360: PetscFunctionBegin;
9362: PetscAssertPointer(flg, 2);
9363: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9364: *flg = PetscBool3ToBool(A->structurally_symmetric);
9365: } else {
9366: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9367: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9368: }
9369: PetscFunctionReturn(PETSC_SUCCESS);
9370: }
9372: /*@
9373: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9375: Not Collective
9377: Input Parameter:
9378: . A - the matrix to check
9380: Output Parameters:
9381: + set - PETSC_TRUE if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9382: - flg - the result (only valid if set is PETSC_TRUE)
9384: Level: advanced
9386: Notes:
9387: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9388: symmetric after changes to the matrices values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9390: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9392: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9393: @*/
9394: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9395: {
9396: PetscFunctionBegin;
9398: PetscAssertPointer(set, 2);
9399: PetscAssertPointer(flg, 3);
9400: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9401: *set = PETSC_TRUE;
9402: *flg = PetscBool3ToBool(A->structurally_symmetric);
9403: } else {
9404: *set = PETSC_FALSE;
9405: }
9406: PetscFunctionReturn(PETSC_SUCCESS);
9407: }
9409: /*@
9410: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9411: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9413: Not Collective
9415: Input Parameter:
9416: . mat - the matrix
9418: Output Parameters:
9419: + nstash - the size of the stash
9420: . reallocs - the number of additional mallocs incurred.
9421: . bnstash - the size of the block stash
9422: - breallocs - the number of additional mallocs incurred.in the block stash
9424: Level: advanced
9426: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9427: @*/
9428: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9429: {
9430: PetscFunctionBegin;
9431: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9432: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9433: PetscFunctionReturn(PETSC_SUCCESS);
9434: }
9436: /*@C
9437: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9438: parallel layout, `PetscLayout` for rows and columns
9440: Collective
9442: Input Parameter:
9443: . mat - the matrix
9445: Output Parameters:
9446: + right - (optional) vector that the matrix can be multiplied against
9447: - left - (optional) vector that the matrix vector product can be stored in
9449: Level: advanced
9451: Notes:
9452: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9454: These are new vectors which are not owned by the mat, they should be destroyed in `VecDestroy()` when no longer needed
9456: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9457: @*/
9458: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9459: {
9460: PetscFunctionBegin;
9463: if (mat->ops->getvecs) {
9464: PetscUseTypeMethod(mat, getvecs, right, left);
9465: } else {
9466: if (right) {
9467: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9468: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9469: PetscCall(VecSetType(*right, mat->defaultvectype));
9470: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9471: if (mat->boundtocpu && mat->bindingpropagates) {
9472: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9473: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9474: }
9475: #endif
9476: }
9477: if (left) {
9478: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9479: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9480: PetscCall(VecSetType(*left, mat->defaultvectype));
9481: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9482: if (mat->boundtocpu && mat->bindingpropagates) {
9483: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9484: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9485: }
9486: #endif
9487: }
9488: }
9489: PetscFunctionReturn(PETSC_SUCCESS);
9490: }
9492: /*@C
9493: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9494: with default values.
9496: Not Collective
9498: Input Parameter:
9499: . info - the `MatFactorInfo` data structure
9501: Level: developer
9503: Notes:
9504: The solvers are generally used through the `KSP` and `PC` objects, for example
9505: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9507: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9509: Developer Notes:
9510: The Fortran interface is not autogenerated as the
9511: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9513: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9514: @*/
9515: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9516: {
9517: PetscFunctionBegin;
9518: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9519: PetscFunctionReturn(PETSC_SUCCESS);
9520: }
9522: /*@
9523: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9525: Collective
9527: Input Parameters:
9528: + mat - the factored matrix
9529: - is - the index set defining the Schur indices (0-based)
9531: Level: advanced
9533: Notes:
9534: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9536: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9538: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9540: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9541: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9542: @*/
9543: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9544: {
9545: PetscErrorCode (*f)(Mat, IS);
9547: PetscFunctionBegin;
9552: PetscCheckSameComm(mat, 1, is, 2);
9553: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9554: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9555: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9556: PetscCall(MatDestroy(&mat->schur));
9557: PetscCall((*f)(mat, is));
9558: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9559: PetscFunctionReturn(PETSC_SUCCESS);
9560: }
9562: /*@
9563: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9565: Logically Collective
9567: Input Parameters:
9568: + F - the factored matrix obtained by calling `MatGetFactor()`
9569: . S - location where to return the Schur complement, can be `NULL`
9570: - status - the status of the Schur complement matrix, can be `NULL`
9572: Level: advanced
9574: Notes:
9575: You must call `MatFactorSetSchurIS()` before calling this routine.
9577: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9579: The routine provides a copy of the Schur matrix stored within the solver data structures.
9580: The caller must destroy the object when it is no longer needed.
9581: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9583: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9585: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9587: Developer Notes:
9588: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9589: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9591: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9592: @*/
9593: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9594: {
9595: PetscFunctionBegin;
9597: if (S) PetscAssertPointer(S, 2);
9598: if (status) PetscAssertPointer(status, 3);
9599: if (S) {
9600: PetscErrorCode (*f)(Mat, Mat *);
9602: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9603: if (f) {
9604: PetscCall((*f)(F, S));
9605: } else {
9606: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9607: }
9608: }
9609: if (status) *status = F->schur_status;
9610: PetscFunctionReturn(PETSC_SUCCESS);
9611: }
9613: /*@
9614: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9616: Logically Collective
9618: Input Parameters:
9619: + F - the factored matrix obtained by calling `MatGetFactor()`
9620: . S - location where to return the Schur complement, can be `NULL`
9621: - status - the status of the Schur complement matrix, can be `NULL`
9623: Level: advanced
9625: Notes:
9626: You must call `MatFactorSetSchurIS()` before calling this routine.
9628: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9630: The routine returns a the Schur Complement stored within the data structures of the solver.
9632: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9634: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9636: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9638: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9640: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9641: @*/
9642: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9643: {
9644: PetscFunctionBegin;
9646: if (S) {
9647: PetscAssertPointer(S, 2);
9648: *S = F->schur;
9649: }
9650: if (status) {
9651: PetscAssertPointer(status, 3);
9652: *status = F->schur_status;
9653: }
9654: PetscFunctionReturn(PETSC_SUCCESS);
9655: }
9657: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9658: {
9659: Mat S = F->schur;
9661: PetscFunctionBegin;
9662: switch (F->schur_status) {
9663: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9664: case MAT_FACTOR_SCHUR_INVERTED:
9665: if (S) {
9666: S->ops->solve = NULL;
9667: S->ops->matsolve = NULL;
9668: S->ops->solvetranspose = NULL;
9669: S->ops->matsolvetranspose = NULL;
9670: S->ops->solveadd = NULL;
9671: S->ops->solvetransposeadd = NULL;
9672: S->factortype = MAT_FACTOR_NONE;
9673: PetscCall(PetscFree(S->solvertype));
9674: }
9675: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9676: break;
9677: default:
9678: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9679: }
9680: PetscFunctionReturn(PETSC_SUCCESS);
9681: }
9683: /*@
9684: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9686: Logically Collective
9688: Input Parameters:
9689: + F - the factored matrix obtained by calling `MatGetFactor()`
9690: . S - location where the Schur complement is stored
9691: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9693: Level: advanced
9695: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9696: @*/
9697: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9698: {
9699: PetscFunctionBegin;
9701: if (S) {
9703: *S = NULL;
9704: }
9705: F->schur_status = status;
9706: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9707: PetscFunctionReturn(PETSC_SUCCESS);
9708: }
9710: /*@
9711: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9713: Logically Collective
9715: Input Parameters:
9716: + F - the factored matrix obtained by calling `MatGetFactor()`
9717: . rhs - location where the right hand side of the Schur complement system is stored
9718: - sol - location where the solution of the Schur complement system has to be returned
9720: Level: advanced
9722: Notes:
9723: The sizes of the vectors should match the size of the Schur complement
9725: Must be called after `MatFactorSetSchurIS()`
9727: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9728: @*/
9729: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9730: {
9731: PetscFunctionBegin;
9738: PetscCheckSameComm(F, 1, rhs, 2);
9739: PetscCheckSameComm(F, 1, sol, 3);
9740: PetscCall(MatFactorFactorizeSchurComplement(F));
9741: switch (F->schur_status) {
9742: case MAT_FACTOR_SCHUR_FACTORED:
9743: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9744: break;
9745: case MAT_FACTOR_SCHUR_INVERTED:
9746: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9747: break;
9748: default:
9749: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9750: }
9751: PetscFunctionReturn(PETSC_SUCCESS);
9752: }
9754: /*@
9755: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9757: Logically Collective
9759: Input Parameters:
9760: + F - the factored matrix obtained by calling `MatGetFactor()`
9761: . rhs - location where the right hand side of the Schur complement system is stored
9762: - sol - location where the solution of the Schur complement system has to be returned
9764: Level: advanced
9766: Notes:
9767: The sizes of the vectors should match the size of the Schur complement
9769: Must be called after `MatFactorSetSchurIS()`
9771: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9772: @*/
9773: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9774: {
9775: PetscFunctionBegin;
9782: PetscCheckSameComm(F, 1, rhs, 2);
9783: PetscCheckSameComm(F, 1, sol, 3);
9784: PetscCall(MatFactorFactorizeSchurComplement(F));
9785: switch (F->schur_status) {
9786: case MAT_FACTOR_SCHUR_FACTORED:
9787: PetscCall(MatSolve(F->schur, rhs, sol));
9788: break;
9789: case MAT_FACTOR_SCHUR_INVERTED:
9790: PetscCall(MatMult(F->schur, rhs, sol));
9791: break;
9792: default:
9793: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9794: }
9795: PetscFunctionReturn(PETSC_SUCCESS);
9796: }
9798: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9799: #if PetscDefined(HAVE_CUDA)
9800: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9801: #endif
9803: /* Schur status updated in the interface */
9804: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9805: {
9806: Mat S = F->schur;
9808: PetscFunctionBegin;
9809: if (S) {
9810: PetscMPIInt size;
9811: PetscBool isdense, isdensecuda;
9813: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
9814: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
9815: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
9816: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
9817: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
9818: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
9819: if (isdense) {
9820: PetscCall(MatSeqDenseInvertFactors_Private(S));
9821: } else if (isdensecuda) {
9822: #if defined(PETSC_HAVE_CUDA)
9823: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
9824: #endif
9825: }
9826: // HIP??????????????
9827: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
9828: }
9829: PetscFunctionReturn(PETSC_SUCCESS);
9830: }
9832: /*@
9833: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
9835: Logically Collective
9837: Input Parameter:
9838: . F - the factored matrix obtained by calling `MatGetFactor()`
9840: Level: advanced
9842: Notes:
9843: Must be called after `MatFactorSetSchurIS()`.
9845: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
9847: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
9848: @*/
9849: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
9850: {
9851: PetscFunctionBegin;
9854: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
9855: PetscCall(MatFactorFactorizeSchurComplement(F));
9856: PetscCall(MatFactorInvertSchurComplement_Private(F));
9857: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
9858: PetscFunctionReturn(PETSC_SUCCESS);
9859: }
9861: /*@
9862: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
9864: Logically Collective
9866: Input Parameter:
9867: . F - the factored matrix obtained by calling `MatGetFactor()`
9869: Level: advanced
9871: Note:
9872: Must be called after `MatFactorSetSchurIS()`
9874: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
9875: @*/
9876: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
9877: {
9878: MatFactorInfo info;
9880: PetscFunctionBegin;
9883: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
9884: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
9885: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
9886: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
9887: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
9888: } else {
9889: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
9890: }
9891: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
9892: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
9893: PetscFunctionReturn(PETSC_SUCCESS);
9894: }
9896: /*@
9897: MatPtAP - Creates the matrix product C = P^T * A * P
9899: Neighbor-wise Collective
9901: Input Parameters:
9902: + A - the matrix
9903: . P - the projection matrix
9904: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
9905: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DEFAULT` if you do not have a good estimate
9906: if the result is a dense matrix this is irrelevant
9908: Output Parameter:
9909: . C - the product matrix
9911: Level: intermediate
9913: Notes:
9914: C will be created and must be destroyed by the user with `MatDestroy()`.
9916: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
9918: Developer Notes:
9919: For matrix types without special implementation the function fallbacks to `MatMatMult()` followed by `MatTransposeMatMult()`.
9921: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
9922: @*/
9923: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
9924: {
9925: PetscFunctionBegin;
9926: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
9927: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
9929: if (scall == MAT_INITIAL_MATRIX) {
9930: PetscCall(MatProductCreate(A, P, NULL, C));
9931: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
9932: PetscCall(MatProductSetAlgorithm(*C, "default"));
9933: PetscCall(MatProductSetFill(*C, fill));
9935: (*C)->product->api_user = PETSC_TRUE;
9936: PetscCall(MatProductSetFromOptions(*C));
9937: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)(*C)), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
9938: PetscCall(MatProductSymbolic(*C));
9939: } else { /* scall == MAT_REUSE_MATRIX */
9940: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
9941: }
9943: PetscCall(MatProductNumeric(*C));
9944: (*C)->symmetric = A->symmetric;
9945: (*C)->spd = A->spd;
9946: PetscFunctionReturn(PETSC_SUCCESS);
9947: }
9949: /*@
9950: MatRARt - Creates the matrix product C = R * A * R^T
9952: Neighbor-wise Collective
9954: Input Parameters:
9955: + A - the matrix
9956: . R - the projection matrix
9957: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
9958: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DEFAULT` if you do not have a good estimate
9959: if the result is a dense matrix this is irrelevant
9961: Output Parameter:
9962: . C - the product matrix
9964: Level: intermediate
9966: Notes:
9967: C will be created and must be destroyed by the user with `MatDestroy()`.
9969: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
9971: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
9972: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
9973: parallel MatRARt is implemented via explicit transpose of R, which could be very expensive.
9974: We recommend using MatPtAP().
9976: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
9977: @*/
9978: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
9979: {
9980: PetscFunctionBegin;
9981: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
9982: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
9984: if (scall == MAT_INITIAL_MATRIX) {
9985: PetscCall(MatProductCreate(A, R, NULL, C));
9986: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
9987: PetscCall(MatProductSetAlgorithm(*C, "default"));
9988: PetscCall(MatProductSetFill(*C, fill));
9990: (*C)->product->api_user = PETSC_TRUE;
9991: PetscCall(MatProductSetFromOptions(*C));
9992: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)(*C)), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
9993: PetscCall(MatProductSymbolic(*C));
9994: } else { /* scall == MAT_REUSE_MATRIX */
9995: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
9996: }
9998: PetscCall(MatProductNumeric(*C));
9999: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10000: PetscFunctionReturn(PETSC_SUCCESS);
10001: }
10003: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10004: {
10005: PetscFunctionBegin;
10006: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10008: if (scall == MAT_INITIAL_MATRIX) {
10009: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10010: PetscCall(MatProductCreate(A, B, NULL, C));
10011: PetscCall(MatProductSetType(*C, ptype));
10012: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10013: PetscCall(MatProductSetFill(*C, fill));
10015: (*C)->product->api_user = PETSC_TRUE;
10016: PetscCall(MatProductSetFromOptions(*C));
10017: PetscCall(MatProductSymbolic(*C));
10018: } else { /* scall == MAT_REUSE_MATRIX */
10019: Mat_Product *product = (*C)->product;
10020: PetscBool isdense;
10022: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)(*C), &isdense, MATSEQDENSE, MATMPIDENSE, ""));
10023: if (isdense && product && product->type != ptype) {
10024: PetscCall(MatProductClear(*C));
10025: product = NULL;
10026: }
10027: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10028: if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10029: PetscCheck(isdense, PetscObjectComm((PetscObject)(*C)), PETSC_ERR_SUP, "Call MatProductCreate() first");
10030: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10031: product = (*C)->product;
10032: product->fill = fill;
10033: product->api_user = PETSC_TRUE;
10034: product->clear = PETSC_TRUE;
10036: PetscCall(MatProductSetType(*C, ptype));
10037: PetscCall(MatProductSetFromOptions(*C));
10038: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)(*C)), PETSC_ERR_SUP, "MatProduct %s not supported for %s and %s", MatProductTypes[ptype], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name);
10039: PetscCall(MatProductSymbolic(*C));
10040: } else { /* user may change input matrices A or B when REUSE */
10041: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10042: }
10043: }
10044: PetscCall(MatProductNumeric(*C));
10045: PetscFunctionReturn(PETSC_SUCCESS);
10046: }
10048: /*@
10049: MatMatMult - Performs matrix-matrix multiplication C=A*B.
10051: Neighbor-wise Collective
10053: Input Parameters:
10054: + A - the left matrix
10055: . B - the right matrix
10056: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10057: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if you do not have a good estimate
10058: if the result is a dense matrix this is irrelevant
10060: Output Parameter:
10061: . C - the product matrix
10063: Notes:
10064: Unless scall is `MAT_REUSE_MATRIX` C will be created.
10066: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10067: call to this function with `MAT_INITIAL_MATRIX`.
10069: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value actually needed.
10071: In the special case where matrix B (and hence C) are dense you can create the correctly sized matrix C yourself and then call this routine with `MAT_REUSE_MATRIX`,
10072: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix C is sparse.
10074: Example of Usage:
10075: .vb
10076: MatProductCreate(A,B,NULL,&C);
10077: MatProductSetType(C,MATPRODUCT_AB);
10078: MatProductSymbolic(C);
10079: MatProductNumeric(C); // compute C=A * B
10080: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10081: MatProductNumeric(C);
10082: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10083: MatProductNumeric(C);
10084: .ve
10086: Level: intermediate
10088: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10089: @*/
10090: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10091: {
10092: PetscFunctionBegin;
10093: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10094: PetscFunctionReturn(PETSC_SUCCESS);
10095: }
10097: /*@
10098: MatMatTransposeMult - Performs matrix-matrix multiplication C=A*B^T.
10100: Neighbor-wise Collective
10102: Input Parameters:
10103: + A - the left matrix
10104: . B - the right matrix
10105: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10106: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known
10108: Output Parameter:
10109: . C - the product matrix
10111: Level: intermediate
10113: Notes:
10114: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10116: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10118: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10119: actually needed.
10121: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10122: and for pairs of `MATMPIDENSE` matrices.
10124: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10126: Options Database Keys:
10127: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10128: first redundantly copies the transposed B matrix on each process and requires O(log P) communication complexity;
10129: the second never stores more than one portion of the B matrix at a time by requires O(P) communication complexity.
10131: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10132: @*/
10133: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10134: {
10135: PetscFunctionBegin;
10136: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10137: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10138: PetscFunctionReturn(PETSC_SUCCESS);
10139: }
10141: /*@
10142: MatTransposeMatMult - Performs matrix-matrix multiplication C=A^T*B.
10144: Neighbor-wise Collective
10146: Input Parameters:
10147: + A - the left matrix
10148: . B - the right matrix
10149: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10150: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known
10152: Output Parameter:
10153: . C - the product matrix
10155: Level: intermediate
10157: Notes:
10158: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10160: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10162: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10164: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10165: actually needed.
10167: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10168: which inherit from `MATSEQAIJ`. C will be of the same type as the input matrices.
10170: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10171: @*/
10172: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10173: {
10174: PetscFunctionBegin;
10175: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10176: PetscFunctionReturn(PETSC_SUCCESS);
10177: }
10179: /*@
10180: MatMatMatMult - Performs matrix-matrix-matrix multiplication D=A*B*C.
10182: Neighbor-wise Collective
10184: Input Parameters:
10185: + A - the left matrix
10186: . B - the middle matrix
10187: . C - the right matrix
10188: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10189: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DEFAULT` if you do not have a good estimate
10190: if the result is a dense matrix this is irrelevant
10192: Output Parameter:
10193: . D - the product matrix
10195: Level: intermediate
10197: Notes:
10198: Unless scall is `MAT_REUSE_MATRIX` D will be created.
10200: `MAT_REUSE_MATRIX` can only be used if the matrices A, B and C have the same nonzero pattern as in the previous call
10202: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10204: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10205: actually needed.
10207: If you have many matrices with the same non-zero structure to multiply, you
10208: should use `MAT_REUSE_MATRIX` in all calls but the first
10210: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10211: @*/
10212: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10213: {
10214: PetscFunctionBegin;
10215: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10216: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10218: if (scall == MAT_INITIAL_MATRIX) {
10219: PetscCall(MatProductCreate(A, B, C, D));
10220: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10221: PetscCall(MatProductSetAlgorithm(*D, "default"));
10222: PetscCall(MatProductSetFill(*D, fill));
10224: (*D)->product->api_user = PETSC_TRUE;
10225: PetscCall(MatProductSetFromOptions(*D));
10226: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)(*D)), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10227: ((PetscObject)C)->type_name);
10228: PetscCall(MatProductSymbolic(*D));
10229: } else { /* user may change input matrices when REUSE */
10230: PetscCall(MatProductReplaceMats(A, B, C, *D));
10231: }
10232: PetscCall(MatProductNumeric(*D));
10233: PetscFunctionReturn(PETSC_SUCCESS);
10234: }
10236: /*@
10237: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10239: Collective
10241: Input Parameters:
10242: + mat - the matrix
10243: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10244: . subcomm - MPI communicator split from the communicator where mat resides in (or `MPI_COMM_NULL` if nsubcomm is used)
10245: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10247: Output Parameter:
10248: . matredundant - redundant matrix
10250: Level: advanced
10252: Notes:
10253: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10254: original matrix has not changed from that last call to MatCreateRedundantMatrix().
10256: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10257: calling it.
10259: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
10261: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10262: @*/
10263: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10264: {
10265: MPI_Comm comm;
10266: PetscMPIInt size;
10267: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10268: Mat_Redundant *redund = NULL;
10269: PetscSubcomm psubcomm = NULL;
10270: MPI_Comm subcomm_in = subcomm;
10271: Mat *matseq;
10272: IS isrow, iscol;
10273: PetscBool newsubcomm = PETSC_FALSE;
10275: PetscFunctionBegin;
10277: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10278: PetscAssertPointer(*matredundant, 5);
10280: }
10282: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10283: if (size == 1 || nsubcomm == 1) {
10284: if (reuse == MAT_INITIAL_MATRIX) {
10285: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10286: } else {
10287: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10288: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10289: }
10290: PetscFunctionReturn(PETSC_SUCCESS);
10291: }
10293: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10294: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10295: MatCheckPreallocated(mat, 1);
10297: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10298: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10299: /* create psubcomm, then get subcomm */
10300: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10301: PetscCallMPI(MPI_Comm_size(comm, &size));
10302: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must between 1 and %d", size);
10304: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10305: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10306: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10307: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10308: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10309: newsubcomm = PETSC_TRUE;
10310: PetscCall(PetscSubcommDestroy(&psubcomm));
10311: }
10313: /* get isrow, iscol and a local sequential matrix matseq[0] */
10314: if (reuse == MAT_INITIAL_MATRIX) {
10315: mloc_sub = PETSC_DECIDE;
10316: nloc_sub = PETSC_DECIDE;
10317: if (bs < 1) {
10318: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10319: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10320: } else {
10321: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10322: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10323: }
10324: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10325: rstart = rend - mloc_sub;
10326: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10327: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10328: PetscCall(ISSetIdentity(iscol));
10329: } else { /* reuse == MAT_REUSE_MATRIX */
10330: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10331: /* retrieve subcomm */
10332: PetscCall(PetscObjectGetComm((PetscObject)(*matredundant), &subcomm));
10333: redund = (*matredundant)->redundant;
10334: isrow = redund->isrow;
10335: iscol = redund->iscol;
10336: matseq = redund->matseq;
10337: }
10338: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10340: /* get matredundant over subcomm */
10341: if (reuse == MAT_INITIAL_MATRIX) {
10342: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10344: /* create a supporting struct and attach it to C for reuse */
10345: PetscCall(PetscNew(&redund));
10346: (*matredundant)->redundant = redund;
10347: redund->isrow = isrow;
10348: redund->iscol = iscol;
10349: redund->matseq = matseq;
10350: if (newsubcomm) {
10351: redund->subcomm = subcomm;
10352: } else {
10353: redund->subcomm = MPI_COMM_NULL;
10354: }
10355: } else {
10356: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10357: }
10358: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10359: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10360: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10361: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10362: }
10363: #endif
10364: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10365: PetscFunctionReturn(PETSC_SUCCESS);
10366: }
10368: /*@C
10369: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10370: a given `Mat`. Each submatrix can span multiple procs.
10372: Collective
10374: Input Parameters:
10375: + mat - the matrix
10376: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10377: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10379: Output Parameter:
10380: . subMat - parallel sub-matrices each spanning a given `subcomm`
10382: Level: advanced
10384: Notes:
10385: The submatrix partition across processors is dictated by `subComm` a
10386: communicator obtained by `MPI_comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10387: is not restricted to be grouped with consecutive original ranks.
10389: Due the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10390: map directly to the layout of the original matrix [wrt the local
10391: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10392: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10393: the `subMat`. However the offDiagMat looses some columns - and this is
10394: reconstructed with `MatSetValues()`
10396: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10398: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10399: @*/
10400: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10401: {
10402: PetscMPIInt commsize, subCommSize;
10404: PetscFunctionBegin;
10405: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10406: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10407: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommZize %d", commsize, subCommSize);
10409: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10410: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10411: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10412: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10413: PetscFunctionReturn(PETSC_SUCCESS);
10414: }
10416: /*@
10417: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10419: Not Collective
10421: Input Parameters:
10422: + mat - matrix to extract local submatrix from
10423: . isrow - local row indices for submatrix
10424: - iscol - local column indices for submatrix
10426: Output Parameter:
10427: . submat - the submatrix
10429: Level: intermediate
10431: Notes:
10432: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10434: Depending on the format of `mat`, the returned submat may not implement `MatMult()`. Its communicator may be
10435: the same as mat, it may be `PETSC_COMM_SELF`, or some other subcomm of `mat`'s.
10437: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10438: `MatSetValuesBlockedLocal()` will also be implemented.
10440: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10441: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
10443: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10444: @*/
10445: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10446: {
10447: PetscFunctionBegin;
10451: PetscCheckSameComm(isrow, 2, iscol, 3);
10452: PetscAssertPointer(submat, 4);
10453: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10455: if (mat->ops->getlocalsubmatrix) {
10456: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10457: } else {
10458: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10459: }
10460: PetscFunctionReturn(PETSC_SUCCESS);
10461: }
10463: /*@
10464: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10466: Not Collective
10468: Input Parameters:
10469: + mat - matrix to extract local submatrix from
10470: . isrow - local row indices for submatrix
10471: . iscol - local column indices for submatrix
10472: - submat - the submatrix
10474: Level: intermediate
10476: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10477: @*/
10478: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10479: {
10480: PetscFunctionBegin;
10484: PetscCheckSameComm(isrow, 2, iscol, 3);
10485: PetscAssertPointer(submat, 4);
10488: if (mat->ops->restorelocalsubmatrix) {
10489: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10490: } else {
10491: PetscCall(MatDestroy(submat));
10492: }
10493: *submat = NULL;
10494: PetscFunctionReturn(PETSC_SUCCESS);
10495: }
10497: /*@
10498: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10500: Collective
10502: Input Parameter:
10503: . mat - the matrix
10505: Output Parameter:
10506: . is - if any rows have zero diagonals this contains the list of them
10508: Level: developer
10510: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10511: @*/
10512: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10513: {
10514: PetscFunctionBegin;
10517: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10518: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10520: if (!mat->ops->findzerodiagonals) {
10521: Vec diag;
10522: const PetscScalar *a;
10523: PetscInt *rows;
10524: PetscInt rStart, rEnd, r, nrow = 0;
10526: PetscCall(MatCreateVecs(mat, &diag, NULL));
10527: PetscCall(MatGetDiagonal(mat, diag));
10528: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10529: PetscCall(VecGetArrayRead(diag, &a));
10530: for (r = 0; r < rEnd - rStart; ++r)
10531: if (a[r] == 0.0) ++nrow;
10532: PetscCall(PetscMalloc1(nrow, &rows));
10533: nrow = 0;
10534: for (r = 0; r < rEnd - rStart; ++r)
10535: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10536: PetscCall(VecRestoreArrayRead(diag, &a));
10537: PetscCall(VecDestroy(&diag));
10538: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10539: } else {
10540: PetscUseTypeMethod(mat, findzerodiagonals, is);
10541: }
10542: PetscFunctionReturn(PETSC_SUCCESS);
10543: }
10545: /*@
10546: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10548: Collective
10550: Input Parameter:
10551: . mat - the matrix
10553: Output Parameter:
10554: . is - contains the list of rows with off block diagonal entries
10556: Level: developer
10558: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10559: @*/
10560: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10561: {
10562: PetscFunctionBegin;
10565: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10566: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10568: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10569: PetscFunctionReturn(PETSC_SUCCESS);
10570: }
10572: /*@C
10573: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10575: Collective; No Fortran Support
10577: Input Parameter:
10578: . mat - the matrix
10580: Output Parameter:
10581: . values - the block inverses in column major order (FORTRAN-like)
10583: Level: advanced
10585: Notes:
10586: The size of the blocks is determined by the block size of the matrix.
10588: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10590: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
10592: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10593: @*/
10594: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar **values)
10595: {
10596: PetscFunctionBegin;
10598: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10599: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10600: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10601: PetscFunctionReturn(PETSC_SUCCESS);
10602: }
10604: /*@C
10605: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10607: Collective; No Fortran Support
10609: Input Parameters:
10610: + mat - the matrix
10611: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10612: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10614: Output Parameter:
10615: . values - the block inverses in column major order (FORTRAN-like)
10617: Level: advanced
10619: Notes:
10620: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10622: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10624: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10625: @*/
10626: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt *bsizes, PetscScalar *values)
10627: {
10628: PetscFunctionBegin;
10630: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10631: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10632: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10633: PetscFunctionReturn(PETSC_SUCCESS);
10634: }
10636: /*@
10637: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10639: Collective
10641: Input Parameters:
10642: + A - the matrix
10643: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10645: Level: advanced
10647: Note:
10648: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10650: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10651: @*/
10652: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10653: {
10654: const PetscScalar *vals;
10655: PetscInt *dnnz;
10656: PetscInt m, rstart, rend, bs, i, j;
10658: PetscFunctionBegin;
10659: PetscCall(MatInvertBlockDiagonal(A, &vals));
10660: PetscCall(MatGetBlockSize(A, &bs));
10661: PetscCall(MatGetLocalSize(A, &m, NULL));
10662: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10663: PetscCall(PetscMalloc1(m / bs, &dnnz));
10664: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10665: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10666: PetscCall(PetscFree(dnnz));
10667: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10668: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10669: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10670: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10671: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10672: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10673: PetscFunctionReturn(PETSC_SUCCESS);
10674: }
10676: /*@C
10677: MatTransposeColoringDestroy - Destroys a coloring context for matrix product C=A*B^T that was created
10678: via `MatTransposeColoringCreate()`.
10680: Collective
10682: Input Parameter:
10683: . c - coloring context
10685: Level: intermediate
10687: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10688: @*/
10689: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10690: {
10691: MatTransposeColoring matcolor = *c;
10693: PetscFunctionBegin;
10694: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10695: if (--((PetscObject)matcolor)->refct > 0) {
10696: matcolor = NULL;
10697: PetscFunctionReturn(PETSC_SUCCESS);
10698: }
10700: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10701: PetscCall(PetscFree(matcolor->rows));
10702: PetscCall(PetscFree(matcolor->den2sp));
10703: PetscCall(PetscFree(matcolor->colorforcol));
10704: PetscCall(PetscFree(matcolor->columns));
10705: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10706: PetscCall(PetscHeaderDestroy(c));
10707: PetscFunctionReturn(PETSC_SUCCESS);
10708: }
10710: /*@C
10711: MatTransColoringApplySpToDen - Given a symbolic matrix product C=A*B^T for which
10712: a `MatTransposeColoring` context has been created, computes a dense B^T by applying
10713: `MatTransposeColoring` to sparse B.
10715: Collective
10717: Input Parameters:
10718: + coloring - coloring context created with `MatTransposeColoringCreate()`
10719: - B - sparse matrix
10721: Output Parameter:
10722: . Btdense - dense matrix B^T
10724: Level: developer
10726: Note:
10727: These are used internally for some implementations of `MatRARt()`
10729: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10730: @*/
10731: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10732: {
10733: PetscFunctionBegin;
10738: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10739: PetscFunctionReturn(PETSC_SUCCESS);
10740: }
10742: /*@C
10743: MatTransColoringApplyDenToSp - Given a symbolic matrix product Csp=A*B^T for which
10744: a `MatTransposeColoring` context has been created and a dense matrix Cden=A*Btdense
10745: in which Btdens is obtained from `MatTransColoringApplySpToDen()`, recover sparse matrix
10746: `Csp` from `Cden`.
10748: Collective
10750: Input Parameters:
10751: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10752: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10754: Output Parameter:
10755: . Csp - sparse matrix
10757: Level: developer
10759: Note:
10760: These are used internally for some implementations of `MatRARt()`
10762: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10763: @*/
10764: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10765: {
10766: PetscFunctionBegin;
10771: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10772: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10773: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10774: PetscFunctionReturn(PETSC_SUCCESS);
10775: }
10777: /*@C
10778: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product C=A*B^T.
10780: Collective
10782: Input Parameters:
10783: + mat - the matrix product C
10784: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
10786: Output Parameter:
10787: . color - the new coloring context
10789: Level: intermediate
10791: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
10792: `MatTransColoringApplyDenToSp()`
10793: @*/
10794: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
10795: {
10796: MatTransposeColoring c;
10797: MPI_Comm comm;
10799: PetscFunctionBegin;
10800: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10801: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10802: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
10804: c->ctype = iscoloring->ctype;
10805: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
10807: *color = c;
10808: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10809: PetscFunctionReturn(PETSC_SUCCESS);
10810: }
10812: /*@
10813: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
10814: matrix has had no new nonzero locations added to (or removed from) the matrix since the previous call then the value will be the
10815: same, otherwise it will be larger
10817: Not Collective
10819: Input Parameter:
10820: . mat - the matrix
10822: Output Parameter:
10823: . state - the current state
10825: Level: intermediate
10827: Notes:
10828: You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
10829: different matrices
10831: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
10833: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
10835: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
10836: @*/
10837: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
10838: {
10839: PetscFunctionBegin;
10841: *state = mat->nonzerostate;
10842: PetscFunctionReturn(PETSC_SUCCESS);
10843: }
10845: /*@
10846: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
10847: matrices from each processor
10849: Collective
10851: Input Parameters:
10852: + comm - the communicators the parallel matrix will live on
10853: . seqmat - the input sequential matrices
10854: . n - number of local columns (or `PETSC_DECIDE`)
10855: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10857: Output Parameter:
10858: . mpimat - the parallel matrix generated
10860: Level: developer
10862: Note:
10863: The number of columns of the matrix in EACH processor MUST be the same.
10865: .seealso: [](ch_matrices), `Mat`
10866: @*/
10867: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
10868: {
10869: PetscMPIInt size;
10871: PetscFunctionBegin;
10872: PetscCallMPI(MPI_Comm_size(comm, &size));
10873: if (size == 1) {
10874: if (reuse == MAT_INITIAL_MATRIX) {
10875: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
10876: } else {
10877: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
10878: }
10879: PetscFunctionReturn(PETSC_SUCCESS);
10880: }
10882: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10884: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
10885: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
10886: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
10887: PetscFunctionReturn(PETSC_SUCCESS);
10888: }
10890: /*@
10891: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI ranks' ownership ranges.
10893: Collective
10895: Input Parameters:
10896: + A - the matrix to create subdomains from
10897: - N - requested number of subdomains
10899: Output Parameters:
10900: + n - number of subdomains resulting on this MPI process
10901: - iss - `IS` list with indices of subdomains on this MPI process
10903: Level: advanced
10905: Note:
10906: The number of subdomains must be smaller than the communicator size
10908: .seealso: [](ch_matrices), `Mat`, `IS`
10909: @*/
10910: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
10911: {
10912: MPI_Comm comm, subcomm;
10913: PetscMPIInt size, rank, color;
10914: PetscInt rstart, rend, k;
10916: PetscFunctionBegin;
10917: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
10918: PetscCallMPI(MPI_Comm_size(comm, &size));
10919: PetscCallMPI(MPI_Comm_rank(comm, &rank));
10920: PetscCheck(N >= 1 && N < (PetscInt)size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
10921: *n = 1;
10922: k = ((PetscInt)size) / N + ((PetscInt)size % N > 0); /* There are up to k ranks to a color */
10923: color = rank / k;
10924: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
10925: PetscCall(PetscMalloc1(1, iss));
10926: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
10927: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
10928: PetscCallMPI(MPI_Comm_free(&subcomm));
10929: PetscFunctionReturn(PETSC_SUCCESS);
10930: }
10932: /*@
10933: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
10935: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
10936: If they are not the same, uses `MatMatMatMult()`.
10938: Once the coarse grid problem is constructed, correct for interpolation operators
10939: that are not of full rank, which can legitimately happen in the case of non-nested
10940: geometric multigrid.
10942: Input Parameters:
10943: + restrct - restriction operator
10944: . dA - fine grid matrix
10945: . interpolate - interpolation operator
10946: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10947: - fill - expected fill, use `PETSC_DEFAULT` if you do not have a good estimate
10949: Output Parameter:
10950: . A - the Galerkin coarse matrix
10952: Options Database Key:
10953: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
10955: Level: developer
10957: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
10958: @*/
10959: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
10960: {
10961: IS zerorows;
10962: Vec diag;
10964: PetscFunctionBegin;
10965: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10966: /* Construct the coarse grid matrix */
10967: if (interpolate == restrct) {
10968: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
10969: } else {
10970: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
10971: }
10973: /* If the interpolation matrix is not of full rank, A will have zero rows.
10974: This can legitimately happen in the case of non-nested geometric multigrid.
10975: In that event, we set the rows of the matrix to the rows of the identity,
10976: ignoring the equations (as the RHS will also be zero). */
10978: PetscCall(MatFindZeroRows(*A, &zerorows));
10980: if (zerorows != NULL) { /* if there are any zero rows */
10981: PetscCall(MatCreateVecs(*A, &diag, NULL));
10982: PetscCall(MatGetDiagonal(*A, diag));
10983: PetscCall(VecISSet(diag, zerorows, 1.0));
10984: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
10985: PetscCall(VecDestroy(&diag));
10986: PetscCall(ISDestroy(&zerorows));
10987: }
10988: PetscFunctionReturn(PETSC_SUCCESS);
10989: }
10991: /*@C
10992: MatSetOperation - Allows user to set a matrix operation for any matrix type
10994: Logically Collective
10996: Input Parameters:
10997: + mat - the matrix
10998: . op - the name of the operation
10999: - f - the function that provides the operation
11001: Level: developer
11003: Example Usage:
11004: .vb
11005: extern PetscErrorCode usermult(Mat, Vec, Vec);
11007: PetscCall(MatCreateXXX(comm, ..., &A));
11008: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFunction)usermult));
11009: .ve
11011: Notes:
11012: See the file `include/petscmat.h` for a complete list of matrix
11013: operations, which all have the form MATOP_<OPERATION>, where
11014: <OPERATION> is the name (in all capital letters) of the
11015: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11017: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11018: sequence as the usual matrix interface routines, since they
11019: are intended to be accessed via the usual matrix interface
11020: routines, e.g.,
11021: .vb
11022: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11023: .ve
11025: In particular each function MUST return `PETSC_SUCCESS` on success and
11026: nonzero on failure.
11028: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11030: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11031: @*/
11032: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11033: {
11034: PetscFunctionBegin;
11036: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))(mat->ops->view)) mat->ops->viewnative = mat->ops->view;
11037: (((void (**)(void))mat->ops)[op]) = f;
11038: PetscFunctionReturn(PETSC_SUCCESS);
11039: }
11041: /*@C
11042: MatGetOperation - Gets a matrix operation for any matrix type.
11044: Not Collective
11046: Input Parameters:
11047: + mat - the matrix
11048: - op - the name of the operation
11050: Output Parameter:
11051: . f - the function that provides the operation
11053: Level: developer
11055: Example Usage:
11056: .vb
11057: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11059: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11060: .ve
11062: Notes:
11063: See the file include/petscmat.h for a complete list of matrix
11064: operations, which all have the form MATOP_<OPERATION>, where
11065: <OPERATION> is the name (in all capital letters) of the
11066: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11068: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11070: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11071: @*/
11072: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11073: {
11074: PetscFunctionBegin;
11076: *f = (((void (**)(void))mat->ops)[op]);
11077: PetscFunctionReturn(PETSC_SUCCESS);
11078: }
11080: /*@
11081: MatHasOperation - Determines whether the given matrix supports the particular operation.
11083: Not Collective
11085: Input Parameters:
11086: + mat - the matrix
11087: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11089: Output Parameter:
11090: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11092: Level: advanced
11094: Note:
11095: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
11097: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11098: @*/
11099: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11100: {
11101: PetscFunctionBegin;
11103: PetscAssertPointer(has, 3);
11104: if (mat->ops->hasoperation) {
11105: PetscUseTypeMethod(mat, hasoperation, op, has);
11106: } else {
11107: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11108: else {
11109: *has = PETSC_FALSE;
11110: if (op == MATOP_CREATE_SUBMATRIX) {
11111: PetscMPIInt size;
11113: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11114: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11115: }
11116: }
11117: }
11118: PetscFunctionReturn(PETSC_SUCCESS);
11119: }
11121: /*@
11122: MatHasCongruentLayouts - Determines whether the rows and columns layouts of the matrix are congruent
11124: Collective
11126: Input Parameter:
11127: . mat - the matrix
11129: Output Parameter:
11130: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11132: Level: beginner
11134: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11135: @*/
11136: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11137: {
11138: PetscFunctionBegin;
11141: PetscAssertPointer(cong, 2);
11142: if (!mat->rmap || !mat->cmap) {
11143: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11144: PetscFunctionReturn(PETSC_SUCCESS);
11145: }
11146: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11147: PetscCall(PetscLayoutSetUp(mat->rmap));
11148: PetscCall(PetscLayoutSetUp(mat->cmap));
11149: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11150: if (*cong) mat->congruentlayouts = 1;
11151: else mat->congruentlayouts = 0;
11152: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11153: PetscFunctionReturn(PETSC_SUCCESS);
11154: }
11156: PetscErrorCode MatSetInf(Mat A)
11157: {
11158: PetscFunctionBegin;
11159: PetscUseTypeMethod(A, setinf);
11160: PetscFunctionReturn(PETSC_SUCCESS);
11161: }
11163: /*@C
11164: MatCreateGraph - create a scalar matrix (that is a matrix with one vertex for each block vertex in the original matrix), for use in graph algorithms
11165: and possibly removes small values from the graph structure.
11167: Collective
11169: Input Parameters:
11170: + A - the matrix
11171: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11172: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11173: - filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
11175: Output Parameter:
11176: . graph - the resulting graph
11178: Level: advanced
11180: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11181: @*/
11182: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, Mat *graph)
11183: {
11184: PetscFunctionBegin;
11188: PetscAssertPointer(graph, 5);
11189: PetscUseTypeMethod(A, creategraph, sym, scale, filter, graph);
11190: PetscFunctionReturn(PETSC_SUCCESS);
11191: }
11193: /*@
11194: MatEliminateZeros - eliminate the nondiagonal zero entries in place from the nonzero structure of a sparse `Mat` in place,
11195: meaning the same memory is used for the matrix, and no new memory is allocated.
11197: Collective
11199: Input Parameters:
11200: + A - the matrix
11201: - keep - if for a given row of `A`, the diagonal coefficient is zero, indicates whether it should be left in the structure or eliminated as well
11203: Level: intermediate
11205: Developer Notes:
11206: The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the end
11207: of the arrays in the data structure are unneeded.
11209: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11210: @*/
11211: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11212: {
11213: PetscFunctionBegin;
11215: PetscUseTypeMethod(A, eliminatezeros, keep);
11216: PetscFunctionReturn(PETSC_SUCCESS);
11217: }