Actual source code: ex19.c
static char help[] = "Tests reusing MPI parallel matrices and MatGetValues().\n\
To test the parallel matrix assembly, this example intentionally lays out\n\
the matrix across processors differently from the way it is assembled.\n\
This example uses bilinear elements on the unit square. Input arguments are:\n\
  -m <size> : problem size\n\n";

#include <petscmat.h>
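
/*
   Fills Ke (stored row-major as a 4x4 element matrix) for a square element of
   area H; see the assembly loop in main(). Each row of Ke sums to zero
   (H/6 - H/8 + H/12 - H/8 = 0), so the assembled matrix maps the constant
   vector to zero, which the norm check in main() relies on.
*/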
PetscErrorCode FormElementStiffness(PetscReal H, PetscScalar *Ke)
{
  PetscFunctionBegin;
  Ke[0]  = H / 6.0;
  Ke[1]  = -.125 * H;
  Ke[2]  = H / 12.0;
  Ke[3]  = -.125 * H;
  Ke[4]  = -.125 * H;
  Ke[5]  = H / 6.0;
  Ke[6]  = -.125 * H;
  Ke[7]  = H / 12.0;
  Ke[8]  = H / 12.0;
  Ke[9]  = -.125 * H;
  Ke[10] = H / 6.0;
  Ke[11] = -.125 * H;
  Ke[12] = -.125 * H;
  Ke[13] = H / 12.0;
  Ke[14] = -.125 * H;
  Ke[15] = H / 6.0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **args)
{
  Mat         C;
  Vec         u, b;
  PetscMPIInt size, rank;
  PetscInt    i, m = 5, N, start, end, M, idx[4];
  PetscInt    j, nrsub, ncsub, *rsub, *csub, mystart, myend;
  PetscBool   flg;
  PetscScalar one = 1.0, Ke[16], *vals;
  PetscReal   h, norm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));

  N = (m + 1) * (m + 1); /* dimension of matrix */
  M = m * m;             /* number of elements */
  h = 1.0 / m;           /* mesh width */
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  /* Create stiffness matrix */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, N, N));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));
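
  /*
     Partition the M elements (not the matrix rows) across the ranks: each rank
     takes M/size elements, and the first M%size ranks take one extra. This
     element partition is independent of the row ownership PETSc chose for C,
     so many MatSetValues() calls below target off-process rows; those values
     are stashed and communicated during MatAssemblyBegin()/MatAssemblyEnd().
  */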
  start = rank * (M / size) + ((M % size) < rank ? (M % size) : rank);
  end   = start + M / size + ((M % size) > rank);

  /* Form the element stiffness for the Laplacian */
  PetscCall(FormElementStiffness(h * h, Ke));
  for (i = start; i < end; i++) {
    /* Global node numbers of the four corners of element i,
       counterclockwise from its lower-left corner */
    idx[0] = (m + 1) * (i / m) + (i % m);
    idx[1] = idx[0] + 1;
    idx[2] = idx[1] + m + 1;
    idx[3] = idx[2] - 1;
    PetscCall(MatSetValues(C, 4, idx, 4, idx, Ke, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /* Assemble the matrix again */
  PetscCall(MatZeroEntries(C));
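
  /*
     MatZeroEntries() zeros the matrix values but keeps the assembled nonzero
     structure, so the assembly loop below reuses the existing parallel matrix
     (the reuse mentioned in the help string) rather than rebuilding it.
  */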

  for (i = start; i < end; i++) {
    /* Global node numbers of the four corners of element i,
       counterclockwise from its lower-left corner */
    idx[0] = (m + 1) * (i / m) + (i % m);
    idx[1] = idx[0] + 1;
    idx[2] = idx[1] + m + 1;
    idx[3] = idx[2] - 1;
    PetscCall(MatSetValues(C, 4, idx, 4, idx, Ke, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /* Create test vectors */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, N));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecSet(u, one));

  /* Check the result: each row of the element stiffness sums to zero, so
     C applied to the constant vector u should be (numerically) zero */
  PetscCall(MatMult(C, u, b));
  PetscCall(VecNorm(b, NORM_2, &norm));
  if (norm > PETSC_SQRT_MACHINE_EPSILON) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error b %g should be near 0\n", (double)norm));

  /* Now test MatGetValues() */
  PetscCall(PetscOptionsHasName(NULL, NULL, "-get_values", &flg));
  if (flg) {
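    /* MatGetValues() can only retrieve entries from rows owned by the calling
       process, so the requested rows are restricted to this rank's ownership range */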
    PetscCall(MatGetOwnershipRange(C, &mystart, &myend));
    nrsub = myend - mystart;
    ncsub = 4;
    PetscCall(PetscMalloc1(nrsub * ncsub, &vals));
    PetscCall(PetscMalloc1(nrsub, &rsub));
    PetscCall(PetscMalloc1(ncsub, &csub));
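    /* Fill rsub with the locally owned rows in descending order and csub with
       four global column indices (also descending), then extract the
       corresponding nrsub-by-ncsub block of values */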
    for (i = myend - 1; i >= mystart; i--) rsub[myend - i - 1] = i;
    for (i = 0; i < ncsub; i++) csub[i] = 2 * (ncsub - i) + mystart;
    PetscCall(MatGetValues(C, nrsub, rsub, ncsub, csub, vals));
    PetscCall(MatView(C, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "processor number %d: start=%" PetscInt_FMT ", end=%" PetscInt_FMT ", mystart=%" PetscInt_FMT ", myend=%" PetscInt_FMT "\n", rank, start, end, mystart, myend));
    for (i = 0; i < nrsub; i++) {
      for (j = 0; j < ncsub; j++) {
        if (PetscImaginaryPart(vals[i * ncsub + j]) != 0.0) {
          PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g + %g i\n", rsub[i], csub[j], (double)PetscRealPart(vals[i * ncsub + j]), (double)PetscImaginaryPart(vals[i * ncsub + j])));
        } else {
          PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g\n", rsub[i], csub[j], (double)PetscRealPart(vals[i * ncsub + j])));
        }
      }
    }
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
    PetscCall(PetscFree(rsub));
    PetscCall(PetscFree(csub));
    PetscCall(PetscFree(vals));
  }

  /* Free data structures */
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4

TEST*/