Actual source code: ex40.c

static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file>     : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>          : > 0  number of domains per processor\n\
  -ov <overlap>       : >=0  amount of overlap between domains\n\
  -nested_dissection  : use nested dissection to define one subdomain per process\n\n";
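
/* A typical run, mirroring the test cases at the bottom of this file:
     mpiexec -n 3 ./ex40 -f ${PETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -nd 7 -ov 2
*/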

#include <petscmat.h>
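
/*
  ISAllGatherDisjoint - gathers the parallel index set iis onto every process and
  splits the concatenation back into one sequential IS per rank, preserving the
  original per-rank sizes. The caller must destroy each returned IS and free the
  returned array.
*/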

PetscErrorCode ISAllGatherDisjoint(IS iis, IS **ois)
{
  IS             *is2, is;
  const PetscInt *idxs;
  PetscInt        i, ls, *sizes;
  PetscMPIInt     size;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)iis), &size));
  PetscCall(PetscMalloc1(size, &is2));
  PetscCall(PetscMalloc1(size, &sizes));
  PetscCall(ISGetLocalSize(iis, &ls));
  /* there is no public ISGetLayout, so gather the per-rank local sizes by hand */
  PetscCallMPI(MPI_Allgather(&ls, 1, MPIU_INT, sizes, 1, MPIU_INT, PetscObjectComm((PetscObject)iis)));
  PetscCall(ISAllGather(iis, &is));
  PetscCall(ISGetIndices(is, &idxs));
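  /* split the gathered indices back into one sequential IS per rank */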
  for (i = 0, ls = 0; i < size; i++) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, sizes[i], idxs + ls, PETSC_COPY_VALUES, &is2[i]));
    ls += sizes[i];
  }
  PetscCall(ISRestoreIndices(is, &idxs));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscFree(sizes));
  *ois = is2;
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, ndpar, i, start, m, n, end, lsize;
  PetscMPIInt rank;
  PetscBool   flg, useND = PETSC_FALSE;
  Mat         A, B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscScalar rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER, "Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-nested_dissection", &useND, NULL));

  /* Read the matrix as a parallel (MPIAIJ) matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatLoad(A, fd));
  PetscCall(MatSetFromOptions(A));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the matrix again as a sequential matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatLoad(B, fd));
  PetscCall(MatSetFromOptions(B));
  PetscCall(PetscViewerDestroy(&fd));

  /* Create the IS corresponding to subdomains */
  if (useND) {
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

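    /* with nested dissection each process handles exactly one subdomain, so nd equals the number of ranks */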
    ndpar = 1;
    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
    nd = (PetscInt)size;
    PetscCall(PetscMalloc1(ndpar, &is1));
    PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
    PetscCall(MatPartitioningSetAdjacency(part, A));
    PetscCall(MatPartitioningSetFromOptions(part));
    PetscCall(MatPartitioningApplyND(part, &ndmap));
    PetscCall(MatPartitioningDestroy(&part));
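    /* ndmap assigns each local row to a rank; build this rank's subdomain from it */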
    PetscCall(ISBuildTwoSided(ndmap, NULL, &is1[0]));
    PetscCall(ISDestroy(&ndmap));
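    /* gather sequential copies of every subdomain so the same test can run on the sequential matrix B */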
    PetscCall(ISAllGatherDisjoint(is1[0], &is2));
  } else {
    /* Create the random Index Sets */
    PetscCall(PetscMalloc1(nd, &is1));
    PetscCall(PetscMalloc1(nd, &is2));

    PetscCall(MatGetSize(A, &m, &n));
    PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
    PetscCall(PetscRandomSetFromOptions(r));
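    /* each subdomain is a random contiguous row range; is1 (tested on parallel A) and is2 (tested on sequential B) start out identical */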
    for (i = 0; i < nd; i++) {
      PetscCall(PetscRandomGetValue(r, &rand));
      start = (PetscInt)(rand * m);
      PetscCall(PetscRandomGetValue(r, &rand));
      end   = (PetscInt)(rand * m);
      lsize = end - start;
      if (start > end) {
        start = end;
        lsize = -lsize;
      }
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is1 + i));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is2 + i));
    }
    ndpar = nd;
    PetscCall(PetscRandomDestroy(&r));
  }
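  /* increase the overlap of each subdomain, in parallel on A and sequentially on B */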
  PetscCall(MatIncreaseOverlap(A, ndpar, is1, ov));
  PetscCall(MatIncreaseOverlap(B, nd, is2, ov));
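  /* in the nested dissection case each rank holds a single domain; gather all of them so they can be compared one-to-one with the nd sequential results */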
  if (useND) {
    IS *is;

    PetscCall(ISAllGatherDisjoint(is1[0], &is));
    PetscCall(ISDestroy(&is1[0]));
    PetscCall(PetscFree(is1));
    is1 = is;
  }
  /* Now see if the serial and parallel cases give the same answer */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISEqual(is1[i], is2[i], &flg));
    if (!flg) {
      PetscCall(ISViewFromOptions(is1[i], NULL, "-err_view"));
      PetscCall(ISViewFromOptions(is2[i], NULL, "-err_view"));
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg=%d", rank, i, (int)flg);
    }
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 1
        args: -nd 7
      test:
        requires: parmetis
        suffix: 1_nd
        args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 2
        args: -nd 7
      test:
        requires: parmetis
        suffix: 2_nd
        args: -nested_dissection -mat_partitioning_type parmetis

TEST*/