Actual source code: ex111.c

static char help[] = "Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";

/*
    Example of usage: mpiexec -n 3 ./ex111 -Mx 10 -My 10 -Mz 10

    Note: the -Npx/-Npy/-Npz options listed in the help text are not read by
    this test; the processor layout is chosen with PETSC_DECIDE.
*/

#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application contexts */
typedef struct {
  PetscInt mx, my, mz;     /* number of grid points in the x, y, and z directions */
  Vec      localX, localF; /* local vectors with ghost region */
  DM       da;
  Vec      x, b, r; /* global vectors */
  Mat      J;       /* Jacobian on grid */
} GridCtx;

typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;
  Mat      Ii; /* interpolation from coarse to fine */
} AppCtx;

#define COARSE_LEVEL 0
#define FINE_LEVEL   1
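/* These level tags are not referenced below; they appear to be retained from
   the ex96.c code this test was derived from. */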

/*
      ratio - ratio of grid lines between the fine and coarse grids (option -ratio).
*/
int main(int argc, char **argv)
{
  AppCtx          user;
  PetscMPIInt     size, rank;
  PetscInt        m, n, M, N, i, nrows;
  PetscScalar     one  = 1.0;
  PetscReal       fill = 2.0;
  Mat             A, P, R, C, PtAP, D;
  PetscScalar    *array;
  PetscRandom     rdm;
  PetscBool       Test_3D = PETSC_FALSE, flg;
  const PetscInt *ia, *ja;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  /* Get size of fine grids and coarse grids */
  user.ratio     = 2;
  user.coarse.mx = 4;
  user.coarse.my = 4;
  user.coarse.mz = 4;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mx", &user.coarse.mx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-My", &user.coarse.my, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mz", &user.coarse.mz, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ratio", &user.ratio, NULL));
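  /* The default coarse mz is nonzero, so the 3D path is taken unless -Mz 0 is given */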
  if (user.coarse.mz) Test_3D = PETSC_TRUE;

  user.fine.mx = user.ratio * (user.coarse.mx - 1) + 1;
  user.fine.my = user.ratio * (user.coarse.my - 1) + 1;
  user.fine.mz = user.ratio * (user.coarse.mz - 1) + 1;

  if (rank == 0) {
    if (!Test_3D) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "coarse grids: %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my, user.fine.mx, user.fine.my));
    } else {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "coarse grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my, user.coarse.mz, user.fine.mx,
                            user.fine.my, user.fine.mz));
    }
  }

  /* Set up distributed array for fine grid */
  if (!Test_3D) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &user.fine.da));
  } else {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, user.fine.mz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, NULL, &user.fine.da));
  }
  PetscCall(DMSetFromOptions(user.fine.da));
  PetscCall(DMSetUp(user.fine.da));

  /* Create and set A on the fine grid */
  PetscCall(DMSetMatType(user.fine.da, MATAIJ));
  PetscCall(DMCreateMatrix(user.fine.da, &A));
  PetscCall(MatGetLocalSize(A, &m, &n));
  PetscCall(MatGetSize(A, &M, &N));

  /* Set every stored entry of A to one (the random context below could be used to supply random values instead) */
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rdm));
  PetscCall(PetscRandomSetFromOptions(rdm));
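  /* MatGetRowIJ() exposes the CSR row offsets ia and column indices ja of a
     sequential AIJ matrix, so ia[nrows] is the total number of stored nonzeros
     and the loop below touches every entry. In parallel, MatMPIAIJGetSeqAIJ()
     returns the diagonal (AA) and off-diagonal (AB) sequential blocks, which
     are filled the same way. */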
  if (size == 1) {
    PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(A, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one;
      PetscCall(MatSeqAIJRestoreArray(A, &array));
    }
    PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
  } else {
    Mat AA, AB;
    PetscCall(MatMPIAIJGetSeqAIJ(A, &AA, &AB, NULL));
    PetscCall(MatGetRowIJ(AA, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(AA, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one;
      PetscCall(MatSeqAIJRestoreArray(AA, &array));
    }
    PetscCall(MatRestoreRowIJ(AA, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    PetscCall(MatGetRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(AB, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one;
      PetscCall(MatSeqAIJRestoreArray(AB, &array));
    }
    PetscCall(MatRestoreRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
  }

  /* Set up distributed array for coarse grid */
  if (!Test_3D) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &user.coarse.da));
  } else {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, user.coarse.mz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, NULL, &user.coarse.da));
  }
  PetscCall(DMSetFromOptions(user.coarse.da));
  PetscCall(DMSetUp(user.coarse.da));

  /* Create interpolation between the fine and coarse grids */
  PetscCall(DMCreateInterpolation(user.coarse.da, user.fine.da, &P, NULL));
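  /* P maps coarse-grid vectors to the fine grid; its transpose (computed next
     as R) restricts fine-grid vectors back to the coarse grid, so R*A*P is the
     standard Galerkin coarse-grid operator */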

  /* Get R = P^T */
  PetscCall(MatTranspose(P, MAT_INITIAL_MATRIX, &R));

  /* C = R*A*P */
  /* Developer's API */
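  /* The MatProduct interface is staged: create the product object, declare the
     product type (MATPRODUCT_ABC is a triple product), pick the algorithm from
     the options database, build the symbolic (nonzero) structure once, then
     fill in the numeric values; a second numeric call reuses the symbolic data. */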
  PetscCall(MatProductCreate(R, A, P, &D));
  PetscCall(MatProductSetType(D, MATPRODUCT_ABC));
  PetscCall(MatProductSetFromOptions(D));
  PetscCall(MatProductSymbolic(D));
  PetscCall(MatProductNumeric(D));
  PetscCall(MatProductNumeric(D)); /* Test reuse of the symbolic structure of D */

  /* User's API */
  { /* Test MatMatMatMult_Basic() */
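    /* Converting A to MATDENSE routes MatMatMatMult() through the basic
       fallback, which forms the triple product by chaining two pairwise
       matrix-matrix multiplies instead of a specialized sparse kernel */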
    Mat Adense, Cdense;
    PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));
    PetscCall(MatMatMatMult(R, Adense, P, MAT_INITIAL_MATRIX, fill, &Cdense));
    PetscCall(MatMatMatMult(R, Adense, P, MAT_REUSE_MATRIX, fill, &Cdense));

    PetscCall(MatMultEqual(D, Cdense, 10, &flg));
    PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "D*v != Cdense*v");
    PetscCall(MatDestroy(&Adense));
    PetscCall(MatDestroy(&Cdense));
  }

  PetscCall(MatMatMatMult(R, A, P, MAT_INITIAL_MATRIX, fill, &C));
  PetscCall(MatMatMatMult(R, A, P, MAT_REUSE_MATRIX, fill, &C));
  PetscCall(MatProductClear(C));
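  /* MatProductClear() frees the intermediate data the product algorithms
     cached on C; the entries of C itself are unchanged */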

  /* Test D == C */
  PetscCall(MatEqual(D, C, &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "D != C");

  /* Test C == PtAP */
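  /* MatPtAP() computes P^T*A*P in a single call; since R = P^T, the result
     must equal the explicit triple product R*A*P computed above */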
  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, fill, &PtAP));
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, fill, &PtAP));
  PetscCall(MatEqual(C, PtAP, &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "C != PtAP");
  PetscCall(MatDestroy(&PtAP));

  /* Clean up */
  PetscCall(MatDestroy(&A));
  PetscCall(PetscRandomDestroy(&rdm));
  PetscCall(DMDestroy(&user.fine.da));
  PetscCall(DMDestroy(&user.coarse.da));
  PetscCall(MatDestroy(&P));
  PetscCall(MatDestroy(&R));
  PetscCall(MatDestroy(&C));
  PetscCall(MatDestroy(&D));
  PetscCall(PetscFinalize());
  return 0;
}
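/* The tests below exercise the default, scalable, and nonscalable
   MatMatMatMult() algorithms; the nonscalable run reuses the expected output
   of the default test, since the algorithm choice must not change the result. */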

/*TEST

   test:

   test:
      suffix: 2
      nsize: 2
      args: -matmatmatmult_via scalable

   test:
      suffix: 3
      nsize: 2
      args: -matmatmatmult_via nonscalable
      output_file: output/ex111_1.out

TEST*/