Actual source code: ex8.c
static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";
/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.
   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/
/*T
   Concepts: KSP^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscksp.h>
int main(int argc,char **args)
{
  Vec            x,b,u;                 /* approx solution, RHS, exact solution */
  Mat            A;                     /* linear system matrix */
  KSP            ksp;                   /* linear solver context */
  PC             pc;                    /* PC context */
  IS             *is,*is_local;         /* array of index sets that define the subdomains */
  PetscInt       overlap = 1;           /* width of subdomain overlap */
  PetscInt       Nsub;                  /* number of subdomains */
  PetscInt       m = 15,n = 17;         /* mesh dimensions in x- and y- directions */
  PetscInt       M = 2,N = 1;           /* number of subdomains in x- and y- directions */
  PetscInt       i,j,Ii,J,Istart,Iend;
  PetscMPIInt    size;
  PetscBool      flg;
  PetscBool      user_subdomains = PETSC_FALSE;
  PetscScalar    v, one = 1.0;
  PetscReal      e;
  PetscInitialize(&argc,&args,(char*)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Mdomains",&M,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Ndomains",&N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-overlap",&overlap,NULL);
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomains",&user_subdomains,NULL);
  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
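  /*
     Each locally owned global row Ii corresponds to grid point (i,j) with
     i = Ii/n and j = Ii - i*n.  The five-point stencil couples the point to
     its four grid neighbors with the value -1 and to itself with the value 4;
     boundary points simply omit the missing neighbors.
  */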
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create and set vectors
  */
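  /*
     The exact solution u is set to all ones and the right-hand side is
     computed as b = A*u, so the error in the computed solution can be
     measured after the solve.
  */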
  MatCreateVecs(A,&u,&b);
  VecDuplicate(u,&x);
  VecSet(u,one);
  MatMult(A,u,b);
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set the default preconditioner for this program to be ASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCASM);
116: /* -------------------------------------------------------------------
117: Define the problem decomposition
118: ------------------------------------------------------------------- */
120: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121: Basic method, should be sufficient for the needs of many users.
122: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
124: Set the overlap, using the default PETSc decomposition via
125: PCASMSetOverlap(pc,overlap);
126: Could instead use the option -pc_asm_overlap <ovl>
128: Set the total number of blocks via -pc_asm_blocks <blks>
129: Note: The ASM default is to use 1 block per processor. To
130: experiment on a single processor with various overlaps, you
131: must specify use of multiple blocks!
132: */
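  /*
     For example, one possible single-process run that exercises these
     options (the executable name ex8 and the monitoring option are
     illustrative only; the -pc_asm_* and -sub_* options are the standard
     PETSc ones described in this file):

         ./ex8 -pc_asm_blocks 4 -pc_asm_overlap 2 -sub_pc_type ilu -ksp_monitor
  */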
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
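    /*
       PCASMCreateSubdomains2D() partitions the m x n grid (with 1 degree of
       freedom per node) into an M x N array of subdomains with the given
       overlap.  It returns the number of subdomains in Nsub, the overlapping
       index sets in is, and the non-overlapping ("local") index sets in
       is_local.
    */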
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is,&is_local);
    PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
    flg  = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-subdomain_view",&flg,NULL);
    if (flg) {
      PetscPrintf(PETSC_COMM_SELF,"Nmesh points: %D x %D; subdomain partition: %D x %D; overlap: %D; Nsub: %D\n",m,n,M,N,overlap,Nsub);
      PetscPrintf(PETSC_COMM_SELF,"IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  IS[%D]\n",i);
        ISView(is[i],PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF,"IS_local:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  IS_local[%D]\n",i);
        ISView(is_local[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
  }
  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
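  /*
     For example, the following invocation (executable name illustrative
     only) applies a direct LU solve on every block while using GMRES for
     the outer iteration:

         ./ex8 -pc_asm_blocks 4 -sub_ksp_type preonly -sub_pc_type lu -ksp_type gmres
  */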
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomain_solvers",&flg,NULL);
  if (flg) {
    KSP       *subksp;       /* array of KSP contexts for local subblocks */
    PetscInt  nlocal,first;  /* number of local subblocks, first local subblock */
    PC        subpc;         /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if PCTYPE is changed from the runtime options
    */
    PetscObjectTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Cannot change the PCTYPE when manually setting the subdomain solver settings");
    /*
       Call KSPSetUp() to set the ASM preconditioner data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);
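    /*
       nlocal is the number of blocks on this process; first is the global
       index of the first block owned by this process.  subksp[0..nlocal-1]
       are the KSP contexts for those local blocks.
    */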
    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }
  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                  Compare result to the exact solution
     ------------------------------------------------------------------- */
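  /*
     Overwrite x with the error x - u and measure its infinity norm.
  */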
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_error",&flg,NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %g\n",(double)e);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(&is[i]);
      ISDestroy(&is_local[i]);
    }
    PetscFree(is);
    PetscFree(is_local);
  }
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}
/*TEST

   test:
      suffix: 1
      args: -print_error

TEST*/