Actual source code: ex4.c
static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";

/*
   Concepts: TS^time-dependent linear problems
   Concepts: TS^heat equation
   Concepts: TS^diffusion equation
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
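   With the two boundary rows pinned to the identity (see RHSMatrixHeat()
   below), this corresponds to the linear ODE system u_t = A*u, where each
   interior row of A carries the stencil [1/h^2  -2/h^2  1/h^2].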
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>
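   For example (illustrative choices of built-in solver and process count):
       mpiexec -n 2 ./ex4 -ts_type beuler -ts_monitor
       mpiexec -n 2 ./ex4 -ts_type rk -ts_monitor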

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/tutorials/ex3.c

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid. Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods (linear solvers)
     petscviewer.h - viewers
     petscpc.h     - preconditioners
     petscsnes.h   - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;            /* communicator */
  DM          da;              /* distributed array data structure */
  Vec         localwork;       /* local ghosted work vector */
  Vec         u_local;         /* local ghosted approximate solution vector */
  Vec         solution;        /* global exact solution vector */
  PetscInt    m;               /* total number of grid points */
  PetscReal   h;               /* mesh width h = 1/(m-1) */
  PetscBool   debug;           /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1,viewer2; /* viewers for the solution and error */
  PetscReal   norm_2,norm_max; /* error norms */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSFunctionHeat(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx        appctx;               /* user-defined application context */
  TS            ts;                   /* timestepping context */
  Mat           A;                    /* matrix data structure */
  Vec           u;                    /* approximate solution vector */
  PetscReal     time_total_max = 1.0; /* default max total time */
  PetscInt      time_steps_max = 100; /* default max timesteps */
  PetscDraw     draw;                 /* drawing context */
  PetscInt      steps,m;
  PetscMPIInt   size;
  PetscReal     dt,ftime;
  PetscBool     flg;
  TSProblemType tsproblem = TS_LINEAR;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  appctx.comm = PETSC_COMM_WORLD;

  m = 60;
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
  appctx.m        = m;
  appctx.h        = 1.0/(m-1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
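     Arguments to DMDACreate1d() below: no special boundary type, m global
     grid points, 1 degree of freedom per point, stencil width 1, and default
     (NULL) per-process partitioning.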
  */

  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
  PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
  PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
  PetscDrawSetDoubleBuffer(draw);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-nonlinear",&flg,NULL);
  tsproblem = flg ? TS_NONLINEAR : TS_LINEAR; /* record the choice so the nonlinear setup below is actually triggered */
  TSSetProblemType(ts,tsproblem);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSMonitorSet(ts,Monitor,&appctx,NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m);
  MatSetFromOptions(A);
  MatSetUp(A);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-time_dependent_rhs",&flg,NULL);
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand-side
       as a time-dependent matrix.
    */
    TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx);
    TSSetRHSJacobian(ts,A,A,RHSMatrixHeat,&appctx);
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand-side
       as a matrix only once, and then sets a null matrix evaluation
       routine.
    */
    RHSMatrixHeat(ts,0.0,u,A,A,&appctx);
    TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx);
    TSSetRHSJacobian(ts,A,A,TSComputeRHSJacobianConstant,&appctx);
  }
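
  /*
     In both branches above, TSComputeRHSFunctionLinear() is a PETSc-provided
     routine that evaluates f(u,t) = A(t)*u by applying the matrix computed by
     the RHS Jacobian callback, so only the Jacobian routine differs between
     the time-dependent and time-independent cases.
  */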

  if (tsproblem == TS_NONLINEAR) {
    SNES snes;
    TSSetRHSFunction(ts,NULL,RHSFunctionHeat,&appctx);
    TSGetSNES(ts,&snes);
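    /* no analytic Jacobian is supplied for the nonlinear form, so let SNES approximate one by finite differences */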
    SNESSetJacobian(snes,NULL,NULL,SNESComputeJacobianDefault,NULL);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

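  /*
     dt = h^2/2 is the classical explicit (forward Euler) stability limit for
     this spatial discretization; implicit TS types can take larger steps, and
     the value can be overridden at runtime with -ts_dt.
  */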
  dt = appctx.h*appctx.h/2.0;
  TSSetTimeStep(ts,dt);
  TSSetSolution(ts,u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);
  TSGetSolveTime(ts,&ftime);
  TSGetStepNumber(ts,&steps);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscPrintf(PETSC_COMM_WORLD,"Total timesteps %D, Final time %g\n",steps,(double)ftime);
  PetscPrintf(PETSC_COMM_WORLD,"Avg. error (2 norm) = %g Avg. error (max norm) = %g\n",(double)(appctx.norm_2/steps),(double)(appctx.norm_max/steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(&ts);
  MatDestroy(&A);
  VecDestroy(&u);
  PetscViewerDestroy(&appctx.viewer1);
  PetscViewerDestroy(&appctx.viewer2);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);
  DMDestroy(&appctx.da);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
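     Each process fills only its own contiguous range [mybase,myend); the
     global index i maps to local array slot i-mybase in u_localptr.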
  */
  for (i=mybase; i<myend; i++) u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = PetscExpReal(-36.*PETSC_PI*PETSC_PI*t); ex2 = PetscExpReal(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                          sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal norm_2,norm_max;

  /*
     View a graph of the current iterate
  */
  VecView(u,appctx->viewer2);

  /*
     Compute the exact solution
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&norm_2);
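  /* scale by sqrt(h) so the discrete 2-norm approximates the continuous L2 norm of the error on [0,1] */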
  norm_2 = PetscSqrtReal(appctx->h)*norm_2;
  VecNorm(appctx->solution,NORM_MAX,&norm_max);
  if (norm_2   < 1e-14) norm_2   = 0;
  if (norm_max < 1e-14) norm_max = 0;

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g max norm error = %g\n",step,(double)time,(double)norm_2,(double)norm_max);
  appctx->norm_2   += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  VecView(appctx->solution,appctx->viewer1);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}

/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts  - the TS context
   t   - current time
   X   - global input vector (current solution)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues(); we could alternatively use MatSetValuesLocal().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Vec X,Mat AA,Mat BB,void *ctx)
{
  Mat         A       = AA;            /* Jacobian matrix */
  AppCtx      *appctx = (AppCtx*)ctx;  /* user-defined application context */
  PetscInt    i,mstart,mend,idx[3];
  PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;
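  /* stwo = -2/h^2 is the diagonal entry and sone = 1/h^2 the off-diagonal entry of each interior row */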

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatGetOwnershipRange(A,&mstart,&mend);

  /*
     Set matrix rows corresponding to boundary data
  */

  if (mstart == 0) {  /* first processor only */
    v[0] = 1.0;
    MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  v[0] = sone; v[1] = stwo; v[2] = sone;
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}
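
/*
   RHSFunctionHeat - Right-hand-side function used for the -nonlinear runs.
   It rebuilds the heat-equation matrix at the current time and applies it to
   the input vector, i.e. it evaluates f(u,t) = A(t)*u through the generic
   nonlinear TS interface.
*/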
PetscErrorCode RHSFunctionHeat(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  Mat A;

  TSGetRHSJacobian(ts,&A,NULL,NULL,&ctx);
  RHSMatrixHeat(ts,t,globalin,A,NULL,ctx);
  /* MatView(A,PETSC_VIEWER_STDOUT_WORLD); */
  MatMult(A,globalin,globalout);
  return 0;
}

/*TEST

    test:
      args: -ts_view -nox

    test:
      suffix: 2
      args: -ts_view -nox
      nsize: 3

    test:
      suffix: 3
      args: -ts_view -nox -nonlinear

    test:
      suffix: 4
      args: -ts_view -nox -nonlinear
      nsize: 3
      timeoutfactor: 3

    test:
      suffix: sundials
      requires: sundials2
      args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
      nsize: 4

    test:
      suffix: sundials_dense
      requires: sundials2
      args: -nox -ts_type sundials -ts_sundials_use_dense -ts_max_steps 5 -nonlinear
      nsize: 1

TEST*/