Actual source code: ex18.c
static const char help[] = "Solves a (permuted) linear system in parallel with KSP.\n\
Input parameters include:\n\
  -permute <natural,rcm,nd,...> : solve system in permuted indexing\n\
  -random_exact_sol             : use a random exact solution vector\n\
  -view_exact_sol               : write exact solution vector to stdout\n\
  -m <mesh_x>                   : number of mesh points in x-direction\n\
  -n <mesh_y>                   : number of mesh points in y-direction\n\n";
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscksp.h>
int main(int argc,char **args)
{
  Vec            x,b,u;   /* approx solution, RHS, exact solution */
  Mat            A;       /* linear system matrix */
  KSP            ksp;     /* linear solver context */
  PetscRandom    rctx;    /* random number generator context */
  PetscReal      norm;    /* norm of solution error */
  PetscInt       i,j,Ii,J,Istart,Iend,m,n,its;
  PetscBool      random_exact_sol,view_exact_sol,permute;
  char           ordering[256] = MATORDERINGRCM;
  IS             rowperm = NULL,colperm = NULL;
  PetscScalar    v;
#if defined(PETSC_USE_LOG)
  PetscLogStage  stage;
#endif
  PetscInitialize(&argc,&args,(char*)0,help);
  PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"Poisson example options","");
  {
    m = 8;
    PetscOptionsInt("-m","Number of grid points in x direction","",m,&m,NULL);
    n = m-1;
    PetscOptionsInt("-n","Number of grid points in y direction","",n,&n,NULL);
    random_exact_sol = PETSC_FALSE;
    PetscOptionsBool("-random_exact_sol","Choose a random exact solution","",random_exact_sol,&random_exact_sol,NULL);
    view_exact_sol = PETSC_FALSE;
    PetscOptionsBool("-view_exact_sol","View exact solution","",view_exact_sol,&view_exact_sol,NULL);
    permute = PETSC_FALSE;
    PetscOptionsFList("-permute","Permute matrix and vector to solve in a new ordering","",MatOrderingList,ordering,ordering,sizeof(ordering),&permute);
  }
  PetscOptionsEnd();
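
  /*
     For example, a run such as (a hypothetical invocation, not part of the
     original source):
        mpiexec -n 2 ./ex18 -m 20 -n 20 -random_exact_sol -permute rcm
     solves a 20x20 grid problem with a random exact solution after the
     system has been reordered with the reverse Cuthill-McKee ordering.
  */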
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);
  MatSeqAIJSetPreallocation(A,5,NULL);
  MatSetUp(A);
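
  /*
     Note on the preallocation above: the 2-D five-point stencil couples each
     unknown to at most four neighbors plus itself, so 5 nonzeros per row (for
     both the diagonal and off-diagonal blocks of the parallel matrix) is a
     safe upper bound.
  */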
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.

     Note: this example uses the less common natural ordering that orders first
     all the unknowns for x = h, then for x = 2h, etc.; hence you see J = Ii +- n
     instead of J = Ii +- m as you might expect. The more standard ordering
     would first do all variables for y = h, then y = 2h, etc.
  */
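  /*
     For reference, each interior row assembled below implements the standard
     five-point Laplacian stencil: a diagonal entry of 4 and entries of -1 for
     the north, south, east, and west neighbors; boundary rows simply omit the
     neighbors that fall outside the grid.
  */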
  PetscLogStageRegister("Assembly", &stage);
  PetscLogStagePush(stage);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  PetscLogStagePop();

  /* A is symmetric. Set symmetric flag to enable ICC/Cholesky preconditioner */
  MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
  /*
     Create parallel vectors.
      - We form 1 vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below).
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);
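
  /*
     A minimal sketch only (not performed in this example): to control the
     partitioning explicitly, one could match the vector layout to the local
     matrix rows instead of passing PETSC_DECIDE above, e.g.
        PetscInt nlocal;
        MatGetLocalSize(A,&nlocal,NULL);
        VecSetSizes(u,nlocal,PETSC_DETERMINE);
  */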
  /*
     Set exact solution; then compute right-hand-side vector.
     By default we use an exact solution of a vector with all
     elements of 1.0; alternatively, the runtime option
     -random_exact_sol forms a solution vector with random components.
  */
  if (random_exact_sol) {
    PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
    PetscRandomSetFromOptions(rctx);
    VecSetRandom(u,rctx);
    PetscRandomDestroy(&rctx);
  } else {
    VecSet(u,1.0);
  }
  MatMult(A,u,b);

  /*
     View the exact solution vector if desired
  */
  if (view_exact_sol) VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  if (permute) {
    Mat Aperm;
    MatGetOrdering(A,ordering,&rowperm,&colperm);
    MatPermute(A,rowperm,colperm,&Aperm);
    VecPermute(b,rowperm,PETSC_FALSE);
    MatDestroy(&A);
    A = Aperm;                  /* Replace original operator with permuted version */
  }
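
  /*
     Why these permutations (explanatory note): MatPermute() builds Aperm with
     entries Aperm[i][j] = A[rowperm[i]][colperm[j]], so the right-hand side
     must be reordered with rowperm before the solve, and the computed solution
     must later be mapped back with the inverse of colperm.  For symmetric
     orderings such as RCM or ND the two index sets coincide.
  */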
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following statement is optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions().  All of these defaults can be
       overridden at runtime, as indicated below.
  */
  KSPSetTolerances(ksp,1.e-2/((m+1)*(n+1)),PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
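
  /*
     A minimal sketch of the "extract the PC context" route mentioned above
     (not performed in this example; PCJACOBI is only an illustrative choice):
        PC pc;
        KSPGetPC(ksp,&pc);
        PCSetType(pc,PCJACOBI);
  */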
  /*
     Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
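
  /*
     For instance (a hypothetical command line, not part of the source):
        mpiexec -n 3 ./ex18 -ksp_type cg -pc_type bjacobi -ksp_monitor_short
     would switch to conjugate gradients with block Jacobi preconditioning
     and print the preconditioned residual norm at each iteration.
  */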
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (permute) VecPermute(x,colperm,PETSC_TRUE);

  /*
     Check the error
  */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);       VecDestroy(&x);
  VecDestroy(&b);       MatDestroy(&A);
  ISDestroy(&rowperm);  ISDestroy(&colperm);
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/*TEST

   test:
      nsize: 3
      args: -m 39 -n 18 -ksp_monitor_short -permute nd
      requires: !single

   test:
      suffix: 2
      nsize: 3
      args: -m 39 -n 18 -ksp_monitor_short -permute rcm
      requires: !single

   test:
      suffix: 3
      nsize: 3
      args: -m 13 -n 17 -ksp_monitor_short -ksp_type cg -ksp_cg_single_reduction
      requires: !single

   test:
      suffix: bas
      args: -m 13 -n 17 -ksp_monitor_short -ksp_type cg -pc_type icc -pc_factor_mat_solver_type bas -ksp_view -pc_factor_levels 1
      filter: grep -v "variant "
      requires: !single

TEST*/