Actual source code: ex303.c

static char help[] = "Testing MatCreateMPIAIJWithSeqAIJ().\n\n";

#include <petscmat.h>

int main(int argc, char **argv)
{
  Mat             A, B;
  PetscInt        i, j, column, M, N, m, n;
  PetscInt       *oi, *oj, nd;
  const PetscInt *garray;
  PetscInt       *garray_h;
  PetscScalar     value;
  PetscScalar    *oa;
  PetscRandom     rctx;
  PetscBool       equal, done;
  Mat             AA, AB;
  PetscMPIInt     size, rank;
  MatType         mat_type;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size > 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run with 2 or more processes");
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  /* Create an MPIAIJ matrix for checking */
  PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, 5, 5, PETSC_DECIDE, PETSC_DECIDE, 0, NULL, 0, NULL, &A));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));
  PetscCall(MatSetUp(A));
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
  PetscCall(PetscRandomSetFromOptions(rctx));

  /* Fill each locally owned row with random values at random columns; with INSERT_VALUES,
     a repeated column simply overwrites the earlier entry */
  for (i = 5 * rank; i < 5 * rank + 5; i++) {
    for (j = 0; j < 5 * size; j++) {
      PetscCall(PetscRandomGetValue(rctx, &value));
      column = (PetscInt)(5 * size * PetscRealPart(value));
      PetscCall(PetscRandomGetValue(rctx, &value));
      PetscCall(MatSetValues(A, 1, &i, 1, &column, &value, INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, &n));

  PetscCall(MatMPIAIJGetSeqAIJ(A, &AA, &AB, &garray));
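  // AA is the local diagonal block of A and AB its off-diagonal block (both SEQAIJ);
  // AB uses compactified local column indices and garray[] maps them back to global columns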

  Mat output_mat_local, output_mat_nonlocal, output_mat_local_copy, output_mat_nonlocal_copy;

  PetscCall(MatConvert(AA, MATSAME, MAT_INITIAL_MATRIX, &output_mat_local));
  PetscCall(MatConvert(AB, MATSAME, MAT_INITIAL_MATRIX, &output_mat_nonlocal));
  PetscCall(MatConvert(AA, MATSAME, MAT_INITIAL_MATRIX, &output_mat_local_copy));
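  // MatCreateMPIAIJWithSeqAIJ() takes ownership of the sequential matrices (and the garray)
  // passed to it, so copies are handed over below rather than A's own blocks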

  // The garray passed in has to be on the host, but it could be created on the device
  // and copied to the host; here we simply copy the existing host values
  PetscInt nonlocalCols;
  PetscCall(MatGetLocalSize(AB, NULL, &nonlocalCols));
  PetscCall(PetscMalloc1(nonlocalCols, &garray_h));
  for (int i = 0; i < nonlocalCols; i++) { garray_h[i] = garray[i]; }
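  // nonlocalCols is the number of columns of AB, i.e. the number of distinct nonlocal
  // columns referenced on this rank, which is also the length of garray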

  // Build our MPI matrix
  // Because garray is provided and output_mat_nonlocal uses local column indices with the
  // compactified width, no compactification is performed
  PetscCall(MatCreateMPIAIJWithSeqAIJ(PETSC_COMM_WORLD, M, N, output_mat_local, output_mat_nonlocal, garray_h, &B));

  PetscCall(MatEqual(A, B, &equal));
  PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Likely a bug in MatCreateMPIAIJWithSeqAIJ()");
  PetscCall(MatDestroy(&B));
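  // MatDestroy(&B) also released garray_h (its ownership passed to B when B was created),
  // which is why it is reset to NULL below rather than freed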

  // ~~~~~~~~~~~~~~~~~
  // Test MatCreateMPIAIJWithSeqAIJ with compactification
  // This is just for testing - it would be silly to do this in practice
  // ~~~~~~~~~~~~~~~~~
  garray_h = NULL;
  PetscCall(MatGetRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nd, (const PetscInt **)&oi, (const PetscInt **)&oj, &done));
  PetscCall(MatSeqAIJGetArray(AB, &oa));
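  // oi and oj are AB's CSR row offsets and (local) column indices, oa its numerical values,
  // and nd the number of local rows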

  // Create a version of AB with N columns and global column indices
  PetscCall(MatGetType(AB, &mat_type));
  PetscCall(MatCreate(PETSC_COMM_SELF, &output_mat_nonlocal_copy));
  PetscCall(MatSetSizes(output_mat_nonlocal_copy, m, N, m, N));
  PetscCall(MatSetType(output_mat_nonlocal_copy, mat_type));
  PetscCall(MatSeqAIJSetPreallocation(output_mat_nonlocal_copy, oi[5], NULL));
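  // oi[5] is the total number of nonzeros in AB (there are 5 local rows), so every row of
  // the copy is generously over-preallocated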

  // Fill the matrix, translating AB's local column indices to global ones via garray
  for (int i = 0; i < 5; i++) {
    for (int j = 0; j < oi[i + 1] - oi[i]; j++) { PetscCall(MatSetValue(output_mat_nonlocal_copy, i, garray[oj[oi[i] + j]], oa[oi[i] + j], INSERT_VALUES)); }
  }
  PetscCall(MatAssemblyBegin(output_mat_nonlocal_copy, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(output_mat_nonlocal_copy, MAT_FINAL_ASSEMBLY));

  PetscCall(MatRestoreRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nd, (const PetscInt **)&oi, (const PetscInt **)&oj, &done));
  PetscCall(MatSeqAIJRestoreArray(AB, &oa));

  // Build our MPI matrix
  // Because garray is NULL and output_mat_nonlocal_copy uses global column indices with
  // width N, MatCreateMPIAIJWithSeqAIJ performs the compactification itself
  PetscCall(MatCreateMPIAIJWithSeqAIJ(PETSC_COMM_WORLD, M, N, output_mat_local_copy, output_mat_nonlocal_copy, garray_h, &B));

  PetscCall(MatEqual(A, B, &equal));
  PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Likely a bug in MatCreateMPIAIJWithSeqAIJ()");
  PetscCall(MatDestroy(&B));

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

  /* Free work space */
  PetscCall(PetscRandomDestroy(&rctx));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}
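/* The test harness block below runs this example on 2 MPI ranks; output/empty.out
   indicates that a passing run is expected to produce no output. */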

/*TEST

  test:
    nsize: 2
    args: -mat_type aij
    output_file: output/empty.out

TEST*/