Actual source code: ex46.c
static char help[] = "Surface processes in geophysics.\n\n";

/*T
   Concepts: SNES^parallel Surface process example
   Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
   Processors: n
T*/

#include <petscsnes.h>
#include <petscdm.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct {
  PetscReal D; /* The diffusion coefficient */
  PetscReal K; /* The advection coefficient */
  PetscInt  m; /* Exponent for A */
} AppCtx;
/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,AppCtx*);

int main(int argc,char **argv)
{
  SNES     snes; /* nonlinear solver */
  AppCtx   user; /* user-defined work context */
  PetscInt its;  /* iterations for convergence */
  DM       da;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscInitialize(&argc,&argv,(char*)0,help);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Surface Process Problem Options", "SNES");
  user.D = 1.0;
  PetscOptionsReal("-D", "The diffusion coefficient D", __FILE__, user.D, &user.D, NULL);
  user.K = 1.0;
  PetscOptionsReal("-K", "The advection coefficient K", __FILE__, user.K, &user.K, NULL);
  user.m = 1;
  PetscOptionsInt("-m", "The exponent for A", __FILE__, user.m, &user.m, NULL);
  PetscOptionsEnd();
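  /* The defaults above can be overridden at run time, e.g. "-D 0.2 -K 3 -m 1"
     (the parameter values exercised by the fm_rise tests at the end of this file). */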

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
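  /* The DMDA carries uniform coordinates on the unit square; FormFunctionLocal()
     reads them back to evaluate the source term funcU() at each grid point. */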
  DMSetApplicationContext(da,&user);
  SNESCreate(PETSC_COMM_WORLD, &snes);
  SNESSetDM(snes, da);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDASNESSetFunctionLocal(da,INSERT_VALUES,(PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))FormFunctionLocal,&user);
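  /* Note: FormJacobianLocal() below is never registered (e.g. with DMDASNESSetJacobianLocal()),
     so by default SNES assembles the Jacobian by finite differences, using a coloring of the
     DMDA's sparsity pattern to do so efficiently (cf. the "IS coloring types" concept above). */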

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSetFromOptions(snes);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSolve(snes,0,0);
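  /* Passing 0 (NULL) for the right-hand side and solution lets SNES obtain its
     solution vector from the DM; the initial guess defaults to the zero vector. */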
  SNESGetIterationNumber(snes,&its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Report the nonlinear iteration count
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n",its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESDestroy(&snes);
  DMDestroy(&da);

  PetscFinalize();
  return 0;
}
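
/*
   funcU - Forcing term evaluated at a node's coordinates; here simply x + y.
*/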
PetscScalar funcU(DMDACoor2d *coords)
{
  return coords->x + coords->y;
}
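
/*
   funcA - A(z) = z^m, computed by repeated multiplication.
*/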
PetscScalar funcA(PetscScalar z, AppCtx *user)
{
  PetscScalar v = 1.0;
  PetscInt    i;

  for (i = 0; i < user->m; ++i) v *= z;
  return v;
}
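
/*
   funcADer - Derivative of A: A'(z) = m*z^(m-1).
*/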
PetscScalar funcADer(PetscScalar z, AppCtx *user)
{
  PetscScalar v = 1.0;
  PetscInt    i;

  for (i = 0; i < user->m-1; ++i) v *= z;
  return (PetscScalar)user->m*v;
}

/*
   FormFunctionLocal - Evaluates nonlinear function, F(x).
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  DM          coordDA;
  Vec         coordinates;
  DMDACoor2d  **coords;
  PetscScalar u, ux, uy, uxx, uyy;
  PetscReal   D, K, hx, hy, hxdhy, hydhx;
  PetscInt    i, j;

  D     = user->D;
  K     = user->K;
  hx    = 1.0/(PetscReal)(info->mx-1);
  hy    = 1.0/(PetscReal)(info->my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  DMGetCoordinateDM(info->da, &coordDA);
  DMGetCoordinates(info->da, &coordinates);
  DMDAVecGetArray(coordDA, coordinates, &coords);
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
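      /* Dirichlet boundary: the residual pins the solution to zero on the boundary;
         interior points get the discretized diffusion/advection residual. */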
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) f[j][i] = x[j][i];
      else {
        u   = x[j][i];
        ux  = (x[j][i+1] - x[j][i])/hx;
        uy  = (x[j+1][i] - x[j][i])/hy;
        uxx = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
        uyy = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
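        /* residual: diffusion term D*(uxx + uyy) (carrying an hx*hy scaling through
           hydhx/hxdhy) minus the advection and source terms scaled by the cell area hx*hy */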
        f[j][i] = D*(uxx + uyy) - (K*funcA(x[j][i], user)*PetscSqrtScalar(ux*ux + uy*uy) + funcU(&coords[j][i]))*hx*hy;
      }
    }
  }
  DMDAVecRestoreArray(coordDA, coordinates, &coords);
  PetscLogFlops(11.0*info->ym*info->xm);
  return 0;
}

/*
   FormJacobianLocal - Evaluates Jacobian matrix.
*/
PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,AppCtx *user)
{
  MatStencil  col[5], row;
  PetscScalar D, K, A, v[5], hx, hy, hxdhy, hydhx, ux, uy;
  PetscReal   normGradZ;
  PetscInt    i, j;

  D     = user->D;
  K     = user->K;
  hx    = 1.0/(PetscReal)(info->mx-1);
  hy    = 1.0/(PetscReal)(info->my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal() or MatSetValues().
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      row.j = j; row.i = i;
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* boundary points */
        v[0] = 1.0;
        MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);
      } else {
        /* interior grid points */
        ux        = (x[j][i+1] - x[j][i])/hx;
        uy        = (x[j+1][i] - x[j][i])/hy;
        normGradZ = PetscRealPart(PetscSqrtScalar(ux*ux + uy*uy));
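        /* keep the gradient norm away from zero so the divisions below are safe */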
        if (normGradZ < 1.0e-8) normGradZ = 1.0e-8;
        A = funcA(x[j][i], user);
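
        /* 5-point stencil: the south/west neighbors carry pure diffusion coupling,
           the east/north entries also pick up part of the |grad u| linearization,
           and the diagonal adds the derivative of the K*A(u)*|grad u| term */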
        v[0] = -D*hxdhy; col[0].j = j - 1; col[0].i = i;
        v[1] = -D*hydhx; col[1].j = j;     col[1].i = i-1;
        v[2] = D*2.0*(hydhx + hxdhy) + K*(funcADer(x[j][i], user)*normGradZ - A/normGradZ)*hx*hy; col[2].j = row.j; col[2].i = row.i;
        v[3] = -D*hydhx + K*A*hx*hy/(2.0*normGradZ); col[3].j = j;     col[3].i = i+1;
        v[4] = -D*hxdhy + K*A*hx*hy/(2.0*normGradZ); col[4].j = j + 1; col[4].i = i;
        MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  return 0;
}

/*TEST

   test:
      args: -snes_view -snes_monitor_short -da_refine 1 -pc_type mg -ksp_type fgmres -pc_mg_type full -mg_levels_ksp_chebyshev_esteig 0.5,1.1

   test:
      suffix: ew_1
      args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 1
      requires: !single

   test:
      suffix: ew_2
      args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 2

   test:
      suffix: ew_3
      args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 3
      requires: !single

   test:
      suffix: fm_rise_2
      args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 1 -snes_ngmres_restart_fm_rise
      requires: !single

   test:
      suffix: fm_rise_4
      args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 2 -snes_ngmres_restart_fm_rise -snes_rtol 1.e-2 -snes_max_it 5

TEST*/