Actual source code: ex211.c

static char help[] = "Tests MatCreateSubmatrix() in parallel.";

#include <petscmat.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>

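/*
   ISGetSeqIS_SameColDist_Private - descriptive summary (added here for readability): given parallel
   index sets isrow/iscol whose entries on each process lie within that process's row/column
   ownership range of mat, build

     isrow_d, iscol_d - sequential index sets of the selected rows/columns in local numbering of
                        the diagonal block
     iscol_o          - sequential index set of the selected off-process columns, in local column
                        numbering of the off-diagonal block B
     garray           - for each entry of iscol_o, the corresponding column index in the submatrix;
                        the caller is responsible for freeing this array
*/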
PetscErrorCode ISGetSeqIS_SameColDist_Private(Mat mat, IS isrow, IS iscol, IS *isrow_d, IS *iscol_d, IS *iscol_o, const PetscInt *garray[])
{
  Vec             x, cmap;
  const PetscInt *is_idx;
  PetscScalar    *xarray, *cmaparray;
  PetscInt        ncols, isstart, *idx, m, rstart, count;
  Mat_MPIAIJ     *a    = (Mat_MPIAIJ *)mat->data;
  Mat             B    = a->B;
  Vec             lvec = a->lvec, lcmap;
  PetscInt        i, cstart, cend, Bn = B->cmap->N;
  MPI_Comm        comm;
  PetscMPIInt     rank;
  VecScatter      Mvctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(ISGetLocalSize(iscol, &ncols));

  /* (1) iscol is a sub-column vector of mat, pad it with '-1.' to form a full vector x */
  PetscCall(MatCreateVecs(mat, &x, NULL));
  PetscCall(VecDuplicate(x, &cmap));
  PetscCall(VecSet(x, -1.0));
  PetscCall(VecSet(cmap, -1.0));

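  /* lvec (owned by the matrix) and lcmap are ghost-sized work vectors; they receive the
     off-process portions of x and cmap in step (2) below */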
  PetscCall(VecDuplicate(lvec, &lcmap));

  /* Get start indices: MPI_Scan gives the inclusive prefix sum of ncols, so after subtracting
     ncols, isstart is this rank's offset into the concatenated (global) iscol */
  PetscCallMPI(MPI_Scan(&ncols, &isstart, 1, MPIU_INT, MPI_SUM, comm));
  isstart -= ncols;
  PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));

  PetscCall(ISGetIndices(iscol, &is_idx));
  PetscCall(VecGetArray(x, &xarray));
  PetscCall(VecGetArray(cmap, &cmaparray));
  PetscCall(PetscMalloc1(ncols, &idx));
  for (i = 0; i < ncols; i++) {
    xarray[is_idx[i] - cstart]    = (PetscScalar)is_idx[i];
    cmaparray[is_idx[i] - cstart] = (PetscScalar)(i + isstart); /* global index of iscol[i] */
    idx[i]                        = is_idx[i] - cstart;         /* local index of iscol[i]  */
  }
  PetscCall(VecRestoreArray(x, &xarray));
  PetscCall(VecRestoreArray(cmap, &cmaparray));
  PetscCall(ISRestoreIndices(iscol, &is_idx));

  /* Get iscol_d */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ncols, idx, PETSC_OWN_POINTER, iscol_d));
  PetscCall(ISGetBlockSize(iscol, &i));
  PetscCall(ISSetBlockSize(*iscol_d, i));

  /* Get isrow_d */
  PetscCall(ISGetLocalSize(isrow, &m));
  rstart = mat->rmap->rstart;
  PetscCall(PetscMalloc1(m, &idx));
  PetscCall(ISGetIndices(isrow, &is_idx));
  for (i = 0; i < m; i++) idx[i] = is_idx[i] - rstart;
  PetscCall(ISRestoreIndices(isrow, &is_idx));

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx, PETSC_OWN_POINTER, isrow_d));
  PetscCall(ISGetBlockSize(isrow, &i));
  PetscCall(ISSetBlockSize(*isrow_d, i));

  /* (2) Scatter x and cmap using aij->Mvctx to get their off-process portions (see MatMult_MPIAIJ) */
#if 0
  if (!a->Mvctx_mpi1) {
    /* a->Mvctx causes random 'count' in o-build? See src/mat/tests/runex59_2 */
    a->Mvctx_mpi1_flg = PETSC_TRUE;
    PetscCall(MatSetUpMultiply_MPIAIJ(mat));
  }
  Mvctx = a->Mvctx_mpi1;
#endif
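  /* The disabled block above would build and use a separate scatter context (Mvctx_mpi1);
     as written, the code uses the matrix's standard Mvctx instead */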
  Mvctx = a->Mvctx;
  PetscCall(VecScatterBegin(Mvctx, x, lvec, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(Mvctx, x, lvec, INSERT_VALUES, SCATTER_FORWARD));

  PetscCall(VecScatterBegin(Mvctx, cmap, lcmap, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(Mvctx, cmap, lcmap, INSERT_VALUES, SCATTER_FORWARD));
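  /* After the scatters, lvec[i] is -1 if ghost column i of B was not selected by iscol and the
     global column index otherwise; lcmap[i] holds that column's index in the submatrix */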

  /* (3) create sequential iscol_o (a subset of iscol) and garray */
  /* off-process column indices */
  count = 0;
  PetscInt *cmap1;
  PetscCall(PetscMalloc1(Bn, &idx));
  PetscCall(PetscMalloc1(Bn, &cmap1));

  PetscCall(VecGetArray(lvec, &xarray));
  PetscCall(VecGetArray(lcmap, &cmaparray));
  for (i = 0; i < Bn; i++) {
    if (PetscRealPart(xarray[i]) > -1.0) {
      idx[count]   = i;                                       /* local column index in off-diagonal part B */
      cmap1[count] = (PetscInt)(PetscRealPart(cmaparray[i])); /* column index in submat */
      count++;
    }
  }
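  /* count is the number of selected columns owned by other ranks; for the every-second-column
     selection built in main() this works out to 3*(size-1), so the hard-coded check below
     appears to assume a 3-process run */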
  printf("[%d] Bn %" PetscInt_FMT ", count %" PetscInt_FMT "\n", rank, Bn, count);
  PetscCall(VecRestoreArray(lvec, &xarray));
  PetscCall(VecRestoreArray(lcmap, &cmaparray));
  if (count != 6) {
    printf("[%d] count %" PetscInt_FMT " != 6 lvec:\n", rank, count);
    PetscCall(VecView(lvec, 0));

    printf("[%d] count %" PetscInt_FMT " != 6 lcmap:\n", rank, count);
    PetscCall(VecView(lcmap, 0));
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "count %" PetscInt_FMT " != 6", count);
  }

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count, idx, PETSC_COPY_VALUES, iscol_o));
  /* cannot ensure iscol_o has same blocksize as iscol! */

  PetscCall(PetscFree(idx));

  *garray = cmap1;
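  /* The caller takes ownership of cmap1 and must free it (see PetscFree(garray) in main()) */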

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&cmap));
  PetscCall(VecDestroy(&lcmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **args)
{
  Mat         C, A;
  PetscInt    i, j, m = 3, n = 2, rstart, rend;
  PetscMPIInt size, rank;
  PetscScalar v;
  IS          isrow, iscol;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  n = 2 * size;
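  /* The global matrix size is m*n = 6*size, so PETSC_DECIDE gives each rank exactly 6 rows */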

  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  /*
     This is JUST to generate a nice test matrix: each process fills the dense rows it owns.
     This is not something one would ever do in practice.
  */
  PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
  for (i = rstart; i < rend; i++) {
    for (j = 0; j < m * n; j++) {
      v = i + j + 1;
      PetscCall(MatSetValues(C, 1, &i, 1, &j, &v, INSERT_VALUES));
    }
  }

  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /*
     Generate a new matrix consisting of every second row and column of
     the original matrix
  */
  PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
  /* Create parallel IS with the rows we want on THIS processor */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD, (rend - rstart) / 2, rstart, 2, &isrow));
  /* Create parallel IS with the columns we want on THIS processor (same as rows for now) */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD, (rend - rstart) / 2, rstart, 2, &iscol));
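  /* Each rank selects every second index from its own ownership range, so isrow/iscol follow the
     same row/column distribution as C, which is what ISGetSeqIS_SameColDist_Private() assumes */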

  IS              iscol_d, isrow_d, iscol_o;
  const PetscInt *garray;
  PetscCall(ISGetSeqIS_SameColDist_Private(C, isrow, iscol, &isrow_d, &iscol_d, &iscol_o, &garray));
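  /* The helper's outputs are not used further and are destroyed immediately; the point is to
     exercise the routine (including its internal count check) before calling MatCreateSubMatrix() */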

  PetscCall(ISDestroy(&isrow_d));
  PetscCall(ISDestroy(&iscol_d));
  PetscCall(ISDestroy(&iscol_o));
  PetscCall(PetscFree(garray));

  PetscCall(MatCreateSubMatrix(C, isrow, iscol, MAT_INITIAL_MATRIX, &A));
  PetscCall(MatCreateSubMatrix(C, isrow, iscol, MAT_REUSE_MATRIX, &A));
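  /* The first call builds A; the second call reuses it, exercising the MAT_REUSE_MATRIX path
     with the same index sets */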

  PetscCall(ISDestroy(&isrow));
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}