Actual source code: ex42.c

static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubMatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
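
/* A typical invocation, mirroring the tests at the bottom of this file and
   assuming the optional PETSc data files are installed:
     mpiexec -n 3 ./ex42 -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2 */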

#include <petscmat.h>

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, i, j, lsize, m, n, *idx, bs;
  PetscMPIInt rank, size;
  PetscBool   flg;
  Mat         A, B, *submatA, *submatB;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscBool   test_unsorted = PETSC_FALSE;
  PetscScalar rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_unsorted", &test_unsorted, NULL));

  /* Read matrix A in parallel */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATAIJ));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatLoad(A, fd));
  PetscCall(PetscViewerDestroy(&fd));
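
/* Below, every process also loads the entire matrix on PETSC_COMM_SELF; this
   sequential copy B provides the reference answer against which the parallel
   submatrices are checked at the end. */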

  /* Read the same matrix as a seq matrix B */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatSetFromOptions(B));
  PetscCall(MatLoad(B, fd));
  PetscCall(PetscViewerDestroy(&fd));

  PetscCall(MatGetBlockSize(A, &bs));
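
/* For AIJ matrices the block size is 1; the -mat_type baij tests below may
   yield bs > 1, in which case indices are shuffled and selected a whole
   block at a time so that block boundaries are respected. */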

  /* Get the matrix size and create the random number generator */
  PetscCall(MatGetSize(A, &m, &n));
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
  PetscCall(PetscRandomSetFromOptions(r));
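
/* PetscRandomGetValue() returns values in [0, 1) by default, so
   rand * (m / bs) below gives a subdomain size between 0 and m/bs. */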

  /* Create the IS corresponding to subdomains */
  PetscCall(PetscMalloc1(nd, &is1));
  PetscCall(PetscMalloc1(nd, &is2));
  PetscCall(PetscMalloc1(m, &idx));
  for (i = 0; i < m; i++) idx[i] = i;
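
/* Each random IS is produced by a partial Fisher-Yates shuffle of idx,
   performed block-by-block so the matrix block structure is preserved; the
   first lsize blocks of the shuffled array form the subdomain. */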

  /* Create the random index sets */
  for (i = 0; i < nd; i++) {
    /* Skip a few, so that the ISs on different processes are different */
    for (j = 0; j < rank; j++) PetscCall(PetscRandomGetValue(r, &rand));
    PetscCall(PetscRandomGetValue(r, &rand));
    lsize = (PetscInt)(rand * (m / bs));
    /* shuffle */
    for (j = 0; j < lsize; j++) {
      PetscInt k, swap, l;

      PetscCall(PetscRandomGetValue(r, &rand));
      k = j + (PetscInt)(rand * ((m / bs) - j));
      for (l = 0; l < bs; l++) {
        swap            = idx[bs * j + l];
        idx[bs * j + l] = idx[bs * k + l];
        idx[bs * k + l] = swap;
      }
    }
    if (!test_unsorted) PetscCall(PetscSortInt(lsize * bs, idx));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize * bs, idx, PETSC_COPY_VALUES, is1 + i));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize * bs, idx, PETSC_COPY_VALUES, is2 + i));
    PetscCall(ISSetBlockSize(is1[i], bs));
    PetscCall(ISSetBlockSize(is2[i], bs));
  }
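
/* MatIncreaseOverlap() enlarges each index set by ov levels of connectivity
   in the matrix graph; this is the same operation PCASM uses to build
   overlapping subdomains. The ISs are then sorted so that the parallel and
   sequential paths extract rows in the same order. */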

  if (!test_unsorted) {
    PetscCall(MatIncreaseOverlap(A, nd, is1, ov));
    PetscCall(MatIncreaseOverlap(B, nd, is2, ov));

    for (i = 0; i < nd; ++i) {
      PetscCall(ISSort(is1[i]));
      PetscCall(ISSort(is2[i]));
    }
  }
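
/* MatCreateSubMatrices() extracts one sequential matrix per IS on each
   process; using the same IS for rows and columns yields the (overlapped)
   principal submatrices. */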

  PetscCall(MatCreateSubMatrices(A, nd, is1, is1, MAT_INITIAL_MATRIX, &submatA));
  PetscCall(MatCreateSubMatrices(B, nd, is2, is2, MAT_INITIAL_MATRIX, &submatB));

  /* Now see if the serial and parallel cases give the same answers */
  for (i = 0; i < nd; ++i) {
    PetscCall(MatEqual(submatA[i], submatB[i], &flg));
    PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%" PetscInt_FMT "-th parallel submatA != seq submatB", i);
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(MatDestroySubMatrices(nd, &submatA));
  PetscCall(MatDestroySubMatrices(nd, &submatB));

  PetscCall(PetscRandomDestroy(&r));
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFree(idx));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex

   test:
      suffix: unsorted_baij_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_baij_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

   test:
      suffix: unsorted_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

TEST*/