Actual source code: ex11.c

static char help[] = "Solves a linear system in parallel with KSP.\n\n";

/*T
   Concepts: KSP^solving a Helmholtz equation
   Concepts: complex numbers;
   Concepts: Helmholtz equation
   Processors: n
T*/

/*
   Description: Solves a complex linear system in parallel with KSP.

   The model problem:
      Solve the Helmholtz equation on the unit square (0,1) x (0,1):
          -delta u - sigma1*u + i*sigma2*u = f,
           where delta = Laplace operator
      Dirichlet b.c.'s on all sides
      Use the 2-D, five-point finite difference stencil.
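
      With mesh spacing h = 1/(n+1), the h^2-scaled five-point stencil assembled
      below gives, at an interior grid point (i,j) (a sketch for orientation only):
         (4 - sigma1*h^2 + i*sigma2*h^2)*u(i,j) - u(i-1,j) - u(i+1,j) - u(i,j-1) - u(i,j+1) = h^2*f(i,j)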

   Compiling the code:
      This example uses the complex-numbers version of PETSc, so PETSc must be
      configured with complex scalars; see the example configure line below.
*/
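
/*
   A typical configure invocation for a complex-scalar build might look like
   the following (an illustration only; other options depend on your compilers
   and MPI installation):

       ./configure --with-scalar-type=complex
*/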

/*
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets               petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                  petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec         x,b,u;      /* approx solution, RHS, exact solution */
  Mat         A;          /* linear system matrix */
  KSP         ksp;        /* linear solver context */
  PetscReal   norm;       /* norm of solution error */
  PetscInt    dim,i,j,Ii,J,Istart,Iend,n = 6,its,use_random;
  PetscScalar v,none = -1.0,sigma2,pfive = 0.5,*xa;
  PetscRandom rctx;
  PetscReal   h2,sigma1 = 100.0;
  PetscBool   flg = PETSC_FALSE;

  PetscInitialize(&argc,&args,(char*)0,help);
  PetscOptionsGetReal(NULL,NULL,"-sigma1",&sigma1,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  dim = n*n;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim);
  MatSetFromOptions(A);
  MatSetUp(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
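
  /*
     For example (an illustration only): with the default n = 6, dim = 36, and
     two MPI processes, PETSC_DECIDE typically assigns rows 0..17 to process 0
     and rows 18..35 to process 1.
  */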

  /*
     Set matrix elements in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */

  PetscOptionsGetBool(NULL,NULL,"-norandom",&flg,NULL);
  if (flg) use_random = 0;
  else use_random = 1;
  if (use_random) {
    PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
    PetscRandomSetFromOptions(rctx);
    PetscRandomSetInterval(rctx,0.0,PETSC_i);
  } else {
    sigma2 = 10.0*PETSC_i;
  }
  h2 = 1.0/((n+1)*(n+1));
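
  /*
     In the loop below the global row Ii corresponds to the grid point (i,j),
     0 <= i,j < n, in a natural row-major ordering (Ii = i*n + j), and h2 = h^2
     with mesh spacing h = 1/(n+1).
  */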
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0) {
      J = Ii-n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);
    }
    if (i<n-1) {
      J = Ii+n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);
    }
    if (j>0) {
      J = Ii-1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);
    }
    if (j<n-1) {
      J = Ii+1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);
    }
    if (use_random) PetscRandomGetValue(rctx,&sigma2);
    v = 4.0 - sigma1*h2 + sigma2*h2;
    MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);
  }
  if (use_random) PetscRandomDestroy(&rctx);

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,dim);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */

  if (use_random) {
    PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
    PetscRandomSetFromOptions(rctx);
    VecSetRandom(u,rctx);
  } else {
    VecSet(u,pfive);
  }
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);
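
  /*
     Note: KSPSetOperators(ksp,Amat,Pmat) takes the operator as its second
     argument and the matrix used to build the preconditioner as its third;
     a different (e.g. cheaper) approximation of A could be supplied as Pmat.
  */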

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
  */
  KSPSetFromOptions(ksp);
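
  /*
     For example, one might run (an illustrative command line only):
         mpiexec -n 2 ./ex11 -n 20 -ksp_type gmres -pc_type jacobi -ksp_monitor_short
  */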

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
      Print the first 3 entries of x; this demonstrates extraction of the
      real and imaginary components of the complex vector, x.
  */
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_x3",&flg,NULL);
  if (flg) {
    VecGetArray(x,&xa);
    PetscPrintf(PETSC_COMM_WORLD,"The first three entries of x are:\n");
    for (i=0; i<3; i++) {
      PetscPrintf(PETSC_COMM_WORLD,"x[%D] = %g + %g i\n",i,(double)PetscRealPart(xa[i]),(double)PetscImaginaryPart(xa[i]));
    }
    VecRestoreArray(x,&xa);
  }

  /*
     Check the error
  */
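  /* VecAXPY(x,none,u) with none = -1.0 overwrites x with x - u; the 2-norm of
     this difference is then the error in the computed solution */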
  VecAXPY(x,none,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  if (norm < 1.e-12) {
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error < 1.e-12 iterations %D\n",its);
  } else {
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  if (use_random) PetscRandomDestroy(&rctx);
  VecDestroy(&u); VecDestroy(&x);
  VecDestroy(&b); MatDestroy(&A);
  PetscFinalize();
  return 0;
}

/*TEST

   build:
      requires: complex

   test:
      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   testset:
      suffix: deflation
      args: -norandom -pc_type deflation -ksp_monitor_short
      requires: superlu_dist

      test:
        nsize: 6

      test:
        nsize: 3
        args: -pc_deflation_compute_space {{db2 aggregation}}

      test:
        suffix: pc_deflation_init_only-0
        nsize: 4
        args: -ksp_type fgmres -pc_deflation_compute_space db4 -pc_deflation_compute_space_size 2 -pc_deflation_levels 2 -deflation_ksp_max_it 10
        #TODO remove suffix and next test when this works
        #args: -pc_deflation_init_only {{0 1}separate output}
        args: -pc_deflation_init_only 0

      test:
        suffix: pc_deflation_init_only-1
        nsize: 4
        args: -ksp_type fgmres -pc_deflation_compute_space db4 -pc_deflation_compute_space_size 2 -pc_deflation_levels 2 -deflation_ksp_max_it 10
        args: -pc_deflation_init_only 1

TEST*/