Actual source code: ex12.c


static char help[] ="Tests PetscObjectSetOptions() for TS object\n\n";

/*
   Concepts: TS^time-dependent nonlinear problems
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    We use by default the backward Euler method.

  ------------------------------------------------------------------------- */
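
/*
   Added note (not part of the original example): a quick check that the
   stated exact solution satisfies the problem.  With u = (1 + x*x)*(1 + t),
        u_t  = 1 + x*x,     u_xx = 2*(1 + t),
   so
        u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2)
                               = 1 + x*x = u_t,
   and u(0,x) = 1 + x*x, u(t,0) = t + 1, u(t,1) = 2*(1 + t) = 2*t + 2.
*/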

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator */
  DM        da;             /* distributed array data structure */
  Vec       localwork;      /* local ghosted work vector */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global exact solution vector */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscOptions   options,optionscopy;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);

  PetscOptionsCreate(&options);
  PetscOptionsSetValue(options,"-ts_monitor","ascii");
  PetscOptionsSetValue(options,"-snes_monitor","ascii");
  PetscOptionsSetValue(options,"-ksp_monitor","ascii");
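
  /*
     Added note: these options live in the standalone options database created
     above with PetscOptionsCreate(), not in the global database, so they are
     not taken from the command line.  The PetscObjectSetOptions() calls below
     attach this database to the DM, Mat, and TS objects so that their
     SetFromOptions() routines read from it.
  */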

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscOptionsGetInt(options,NULL,"-M",&appctx.m,NULL);

  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
  PetscObjectSetOptions((PetscObject)appctx.da,options);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  PetscObjectSetOptions((PetscObject)ts,options);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  PetscObjectSetOptions((PetscObject)A,options);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  TSSetTimeStep(ts,dt);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscObjectGetOptions((PetscObject)ts,&optionscopy);
  if (options != optionscopy) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"PetscObjectGetOptions() failed");

  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);
  PetscOptionsDestroy(&options);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }
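
  /*
     Added sketch (not used by this example): the same initialization written
     with VecSetValues() and global indices, as the comment above mentions,
     would look roughly like

        for (i=mybase; i<myend; i++) {
          PetscScalar val = 1.0 + (h*i)*(h*i);
          VecSetValues(u,1,&i,&val,INSERT_VALUES);
        }
        VecAssemblyBegin(u);
        VecAssemblyEnd(u);

     with no VecGetArray()/VecRestoreArray() needed in that variant.
  */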

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
      Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in,&localptr);

  /*
      Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (rank == 0)      copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
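  /*
     Added note: sc = 1/(2*h^2*(1+t)^2), so the loop below is the centered
     finite difference approximation of the right-hand side,
        u * u_xx / (2*(t+1)^2)  ~=  u_i * (u_{i+1} - 2*u_i + u_{i-1}) / h^2
                                        / (2*(1+t)^2).
  */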
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;      /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(BB,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
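  /*
     Added note: at the two boundary rows the right-hand side set in
     RHSFunction() is constant in u (u_t = 1 and u_t = 2 respectively), so
     those Jacobian rows are identically zero; an explicit 0.0 is inserted on
     the diagonal so the boundary rows still contribute a diagonal entry to
     the assembled matrix.
  */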
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
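  /*
     Added note: for an interior node the right-hand side is
        F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),   sc = 1/(2*h^2*(1+t)^2),
     so row i of the Jacobian has the entries
        dF_i/du_{i-1} = sc*u_i
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc*u_i
     which are the values v[0], v[1], v[2] set below.
  */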
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES);
  }

  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
  if (BB != AA) {
    MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY);
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}

/*TEST

    test:
      requires: !single

TEST*/