Actual source code: ex260.c

static char help[] = "Tests that MatView() and MatLoad() work for an MPIAIJ matrix with total nz > PETSC_INT_MAX\n\n";

#include <petscmat.h>

int main(int argc, char **args)
{
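  /* nzr is sized so that, over the two ranks, the roughly 4*n*nzr nonzeros created below
     add up to about (4/3.8)*PETSC_INT_MAX, i.e. just above PETSC_INT_MAX */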
  PetscInt     n = 1000000, nzr = (PetscInt)((double)PETSC_INT_MAX) / (3.8 * n);
  Mat          A;
  PetscScalar *a;
  PetscInt    *ii, *jd, *jo;
  PetscMPIInt  rank, size;
  PetscViewer  viewer;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run with two MPI ranks");
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
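  /* Allocate the CSR row offsets (ii), the column indices of the diagonal (jd) and
     off-diagonal (jo) blocks, and the values (a) in a single PetscMalloc4() call */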
  PetscCall(PetscMalloc4(n + 1, &ii, n * nzr, &jd, n * nzr, &jo, n * nzr, &a));
  ii[0] = 0;
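  /* Each local row gets nzr entries in the diagonal block and nzr entries in the off-diagonal
     block; the row offsets in ii are shared by both blocks */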
  for (PetscInt i = 0; i < n; i++) {
    ii[i + 1] = ii[i] + nzr;
    for (PetscInt j = 0; j < nzr; j++) jd[ii[i] + j] = j;
    if (rank == 0) {
      for (PetscInt j = 0; j < nzr; j++) jo[ii[i] + j] = n + j - 1;
    } else {
      for (PetscInt j = 0; j < nzr; j++) jo[ii[i] + j] = j;
    }
  }
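  /* MatCreateMPIAIJWithSplitArrays() uses the caller's arrays directly rather than copying them,
     so ii, jd, jo, and a are freed below only after the matrix is destroyed */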
  PetscCall(MatCreateMPIAIJWithSplitArrays(PETSC_COMM_WORLD, n, n, PETSC_DETERMINE, PETSC_DETERMINE, ii, jd, a, ii, jo, a, &A));
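  /* PETSC_VIEWER_BINARY_WORLD writes the matrix to the default binary file "binaryoutput" */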
  PetscCall(MatView(A, PETSC_VIEWER_BINARY_WORLD));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFree4(ii, jd, jo, a));

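  /* Read the matrix back from the file written above to check that MatLoad() also handles
     a total nonzero count larger than PETSC_INT_MAX */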
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "binaryoutput", FILE_MODE_READ, &viewer));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      TODO: requires too much memory to run in the CI
      nsize: 2

TEST*/