Actual source code: ex2.c


  2: static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
  3: timestepping.  Runtime options include:\n\
  4:   -M <xg>, where <xg> = number of grid points\n\
  5:   -debug : Activate debugging printouts\n\
  6:   -nox   : Deactivate x-window graphics\n\n";
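
     /*
        For example (illustrative only, assuming the executable is built as ex2),
        a typical parallel run using the options above might be:

            mpiexec -n 2 ./ex2 -M 100 -debug
     */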

  8: /*
  9:    Concepts: TS^time-dependent nonlinear problems
 10:    Processors: n
 11: */

 13: /* ------------------------------------------------------------------------

 15:    This program solves the PDE

 17:                u * u_xx
 18:          u_t = ---------
 19:                2*(t+1)^2

 21:     on the domain 0 <= x <= 1, with boundary conditions
 22:          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
 23:     and initial condition
 24:          u(0,x) = 1 + x*x.

 26:     The exact solution is:
 27:          u(t,x) = (1 + x*x) * (1 + t)

 29:     Note that since the solution is linear in time and quadratic in x,
 30:     the finite difference scheme actually computes the "exact" solution.

 32:     By default, we use the backward Euler method.

 34:   ------------------------------------------------------------------------- */
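
     /*
        As a sketch of the spatial discretization used below (mesh width h,
        grid values u_i ~ u(t,i*h)), the interior right-hand-side entries are

                     u_i * (u_{i+1} - 2*u_i + u_{i-1})
              F_i =  ---------------------------------
                              2*(t+1)^2 * h^2

        which is what RHSFunction() computes via the scaling factor
        sc = 1/(2*(t+1)^2*h^2).
     */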

 36: /*
 37:    Include "petscts.h" to use the PETSc timestepping routines. Note that
 38:    this file automatically includes "petscsys.h" and other lower-level
 39:    PETSc include files.

 41:    Include "petscdmda.h" so that we can use the distributed array data
 42:    structures to manage the parallel grid.
 43: */
 44: #include <petscts.h>
 45: #include <petscdm.h>
 46: #include <petscdmda.h>
 47: #include <petscdraw.h>

 49: /*
 50:    User-defined application context - contains data needed by the
 51:    application-provided callback routines.
 52: */
 53: typedef struct {
 54:   MPI_Comm  comm;           /* communicator */
 55:   DM        da;             /* distributed array data structure */
 56:   Vec       localwork;      /* local ghosted work vector */
 57:   Vec       u_local;        /* local ghosted approximate solution vector */
 58:   Vec       solution;       /* global exact solution vector */
 59:   PetscInt  m;              /* total number of grid points */
 60:   PetscReal h;              /* mesh width: h = 1/(m-1) */
 61:   PetscBool debug;          /* flag (1 indicates activation of debugging printouts) */
 62: } AppCtx;

 64: /*
 65:    User-defined routines, provided below.
 66: */
 67: extern PetscErrorCode InitialConditions(Vec,AppCtx*);
 68: extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
 69: extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
 70: extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
 71: extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

 73: int main(int argc,char **argv)
 74: {
 75:   AppCtx         appctx;                 /* user-defined application context */
 76:   TS             ts;                     /* timestepping context */
 77:   Mat            A;                      /* Jacobian matrix data structure */
 78:   Vec            u;                      /* approximate solution vector */
 79:   PetscInt       time_steps_max = 100;  /* default max timesteps */
 80:   PetscReal      dt;
 81:   PetscReal      time_total_max = 100.0; /* default max total time */
 82:   PetscBool      mymonitor      = PETSC_FALSE;
 83:   PetscReal      bounds[]       = {1.0, 3.3};

 85:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 86:      Initialize program and set problem parameters
 87:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

 89:   PetscInitialize(&argc,&argv,(char*)0,help);
 90:   PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);

 92:   appctx.comm = PETSC_COMM_WORLD;
 93:   appctx.m    = 60;

 95:   PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL);
 96:   PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
 97:   PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor);

 99:   appctx.h    = 1.0/(appctx.m-1.0);

101:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
102:      Create vector data structures
103:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

105:   /*
106:      Create distributed array (DMDA) to manage parallel grid and vectors
107:      and to set up the ghost point communication pattern.  There are M
108:      total grid values spread equally among all the processors.
109:   */
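       /*
          The arguments are: the communicator, the boundary type (non-periodic),
          the global number of grid points appctx.m, one degree of freedom per
          node, a stencil width of 1 (one ghost point on each side), NULL for
          the default division of points among processes, and the DMDA itself.
       */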
110:   DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
111:   DMSetFromOptions(appctx.da);
112:   DMSetUp(appctx.da);

114:   /*
115:      Extract global and local vectors from DMDA; we use these to store the
116:      approximate solution.  Then duplicate these for remaining vectors that
117:      have the same types.
118:   */
119:   DMCreateGlobalVector(appctx.da,&u);
120:   DMCreateLocalVector(appctx.da,&appctx.u_local);

122:   /*
123:      Create local work vector for use in evaluating right-hand-side function;
124:      create global work vector for storing exact solution.
125:   */
126:   VecDuplicate(appctx.u_local,&appctx.localwork);
127:   VecDuplicate(u,&appctx.solution);

129:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
130:      Create timestepping solver context; set callback routine for
131:      right-hand-side function evaluation.
132:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

134:   TSCreate(PETSC_COMM_WORLD,&ts);
135:   TSSetProblemType(ts,TS_NONLINEAR);
136:   TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);

138:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
139:      Set optional user-defined monitoring routine
140:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

142:   if (mymonitor) {
143:     TSMonitorSet(ts,Monitor,&appctx,NULL);
144:   }

146:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
147:      For nonlinear problems, the user can provide a Jacobian evaluation
148:      routine (or use a finite differencing approximation).

150:      Create matrix data structure; set Jacobian evaluation routine.
151:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

153:   MatCreate(PETSC_COMM_WORLD,&A);
154:   MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
155:   MatSetFromOptions(A);
156:   MatSetUp(A);
157:   TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);

159:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
160:      Set solution vector and initial timestep
161:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

163:   dt   = appctx.h/2.0;
164:   TSSetTimeStep(ts,dt);

166:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
167:      Customize timestepping solver:
168:        - Set the solution method to be the Backward Euler method.
169:        - Set timestepping duration info
170:      Then set runtime options, which can override these defaults.
171:      For example,
172:           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
173:      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
174:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

176:   TSSetType(ts,TSBEULER);
177:   TSSetMaxSteps(ts,time_steps_max);
178:   TSSetMaxTime(ts,time_total_max);
179:   TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
180:   TSSetFromOptions(ts);

182:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
183:      Solve the problem
184:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

186:   /*
187:      Evaluate initial conditions
188:   */
189:   InitialConditions(u,&appctx);

191:   /*
192:      Run the timestepping solver
193:   */
194:   TSSolve(ts,u);

196:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
197:      Free work space.  All PETSc objects should be destroyed when they
198:      are no longer needed.
199:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

201:   TSDestroy(&ts);
202:   VecDestroy(&u);
203:   MatDestroy(&A);
204:   DMDestroy(&appctx.da);
205:   VecDestroy(&appctx.localwork);
206:   VecDestroy(&appctx.solution);
207:   VecDestroy(&appctx.u_local);

209:   /*
210:      Always call PetscFinalize() before exiting a program.  This routine
211:        - finalizes the PETSc libraries as well as MPI
212:        - provides summary and diagnostic information if certain runtime
213:          options are chosen (e.g., -log_view).
214:   */
215:   PetscFinalize();
216:   return 0;
217: }
218: /* --------------------------------------------------------------------- */
219: /*
220:    InitialConditions - Computes the solution at the initial time.

222:    Input Parameters:
223:    u - uninitialized solution vector (global)
224:    appctx - user-defined application context

226:    Output Parameter:
227:    u - vector with solution at initial time (global)
228: */
229: PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
230: {
231:   PetscScalar    *u_localptr,h = appctx->h,x;
232:   PetscInt       i,mybase,myend;

234:   /*
235:      Determine starting point of each processor's range of
236:      grid values.
237:   */
238:   VecGetOwnershipRange(u,&mybase,&myend);

240:   /*
241:     Get a pointer to vector data.
242:     - For default PETSc vectors, VecGetArray() returns a pointer to
243:       the data array.  Otherwise, the routine is implementation dependent.
244:     - You MUST call VecRestoreArray() when you no longer need access to
245:       the array.
246:     - Note that the Fortran interface to VecGetArray() differs from the
247:       C version.  See the users manual for details.
248:   */
249:   VecGetArray(u,&u_localptr);

251:   /*
252:      We initialize the solution array by simply writing the solution
253:      directly into the array locations.  Alternatively, we could use
254:      VecSetValues() or VecSetValuesLocal().
255:   */
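       /*
          Note that VecGetArray() exposes only the locally owned block of the
          vector, indexed from 0 on each process, so global grid point i is
          stored in local slot i-mybase.
       */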
256:   for (i=mybase; i<myend; i++) {
257:     x = h*(PetscReal)i; /* current location in global grid */
258:     u_localptr[i-mybase] = 1.0 + x*x;
259:   }

261:   /*
262:      Restore vector
263:   */
264:   VecRestoreArray(u,&u_localptr);

266:   /*
267:      Print debugging information if desired
268:   */
269:   if (appctx->debug) {
270:     PetscPrintf(appctx->comm,"initial guess vector\n");
271:     VecView(u,PETSC_VIEWER_STDOUT_WORLD);
272:   }

274:   return 0;
275: }
276: /* --------------------------------------------------------------------- */
277: /*
278:    ExactSolution - Computes the exact solution at a given time.

280:    Input Parameters:
281:    t - current time
282:    solution - vector in which exact solution will be computed
283:    appctx - user-defined application context

285:    Output Parameter:
286:    solution - vector with the newly computed exact solution
287: */
288: PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
289: {
290:   PetscScalar    *s_localptr,h = appctx->h,x;
291:   PetscInt       i,mybase,myend;

293:   /*
294:      Determine starting and ending points of each processor's
295:      range of grid values
296:   */
297:   VecGetOwnershipRange(solution,&mybase,&myend);

299:   /*
300:      Get a pointer to vector data.
301:   */
302:   VecGetArray(solution,&s_localptr);

304:   /*
305:      Simply write the solution directly into the array locations.
306:      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
307:   */
308:   for (i=mybase; i<myend; i++) {
309:     x = h*(PetscReal)i;
310:     s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
311:   }

313:   /*
314:      Restore vector
315:   */
316:   VecRestoreArray(solution,&s_localptr);
317:   return 0;
318: }
319: /* --------------------------------------------------------------------- */
320: /*
321:    Monitor - User-provided routine to monitor the solution computed at
322:    each timestep.  This example plots the solution and computes the
323:    error in two different norms.

325:    Input Parameters:
326:    ts     - the timestep context
327:    step   - the count of the current step (with 0 meaning the
328:             initial condition)
329:    time   - the current time
330:    u      - the solution at this timestep
331:    ctx    - the user-provided context for this monitoring routine.
332:             In this case we use the application context which contains
333:             information about the problem size, workspace and the exact
334:             solution.
335: */
336: PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
337: {
338:   AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
339:   PetscReal      en2,en2s,enmax;
340:   PetscDraw      draw;

342:   /*
343:      We use the default X Windows viewer
344:              PETSC_VIEWER_DRAW_(appctx->comm)
345:      that is associated with the current communicator. This saves
346:      the effort of calling PetscViewerDrawOpen() to create the window.
347:      Note that if we wished to plot several items in separate windows we
348:      would create each viewer with PetscViewerDrawOpen() and store them in
349:      the application context, appctx.

 351:      Double buffering makes graphics look better.
352:   */
353:   PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
354:   PetscDrawSetDoubleBuffer(draw);
355:   VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));

357:   /*
358:      Compute the exact solution at this timestep
359:   */
360:   ExactSolution(time,appctx->solution,appctx);

362:   /*
363:      Print debugging information if desired
364:   */
365:   if (appctx->debug) {
366:     PetscPrintf(appctx->comm,"Computed solution vector\n");
367:     VecView(u,PETSC_VIEWER_STDOUT_WORLD);
368:     PetscPrintf(appctx->comm,"Exact solution vector\n");
369:     VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
370:   }

372:   /*
373:      Compute the 2-norm and max-norm of the error
374:   */
375:   VecAXPY(appctx->solution,-1.0,u);
376:   VecNorm(appctx->solution,NORM_2,&en2);
 377:   en2s = PetscSqrtReal(appctx->h)*en2;  /* scale by sqrt(h) to approximate the continuous L2 norm */
378:   VecNorm(appctx->solution,NORM_MAX,&enmax);

380:   /*
381:      PetscPrintf() causes only the first processor in this
382:      communicator to print the timestep information.
383:   */
384:   PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g  max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax);

386:   /*
387:      Print debugging information if desired
388:   */
389:   if (appctx->debug) {
390:     PetscPrintf(appctx->comm,"Error vector\n");
391:     VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
392:   }
393:   return 0;
394: }
395: /* --------------------------------------------------------------------- */
396: /*
 397:    RHSFunction - User-provided routine that evaluates the right-hand-side
398:    function of the ODE.  This routine is set in the main program by
399:    calling TSSetRHSFunction().  We compute:
400:           global_out = F(global_in)

402:    Input Parameters:
 403:    ts         - timestepping context
404:    t          - current time
405:    global_in  - vector containing the current iterate
406:    ctx        - (optional) user-provided context for function evaluation.
407:                 In this case we use the appctx defined above.

409:    Output Parameter:
410:    global_out - vector containing the newly evaluated function
411: */
412: PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
413: {
414:   AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
415:   DM                da        = appctx->da;        /* distributed array */
416:   Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
417:   Vec               localwork = appctx->localwork; /* local ghosted work vector */
418:   PetscInt          i,localsize;
419:   PetscMPIInt       rank,size;
420:   PetscScalar       *copyptr,sc;
421:   const PetscScalar *localptr;

423:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
424:      Get ready for local function computations
425:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
426:   /*
427:      Scatter ghost points to local vector, using the 2-step process
428:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
429:      By placing code between these two statements, computations can be
430:      done while messages are in transition.
431:   */
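       /*
          (For instance, purely local work that needs no ghost values, such as
          computing the scaling factor sc below, could be placed between the
          Begin and End calls to overlap communication with computation.)
       */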
432:   DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
433:   DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

435:   /*
436:       Access directly the values in our local INPUT work array
437:   */
438:   VecGetArrayRead(local_in,&localptr);

440:   /*
441:       Access directly the values in our local OUTPUT work array
442:   */
443:   VecGetArray(localwork,&copyptr);

445:   sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

447:   /*
448:       Evaluate our function on the nodes owned by this processor
449:   */
450:   VecGetLocalSize(local_in,&localsize);

452:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
453:      Compute entries for the locally owned part
454:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

456:   /*
457:      Handle boundary conditions: This is done by using the boundary condition
458:         u(t,boundary) = g(t,boundary)
459:      for some function g. Now take the derivative with respect to t to obtain
460:         u_{t}(t,boundary) = g_{t}(t,boundary)

462:      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
 463:              and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
464:   */
465:   MPI_Comm_rank(appctx->comm,&rank);
466:   MPI_Comm_size(appctx->comm,&size);
 467:   if (rank == 0)      copyptr[0]           = 1.0;
468:   if (rank == size-1) copyptr[localsize-1] = 2.0;

470:   /*
 471:      Handle the interior nodes where the PDE is replaced by finite
472:      difference operators.
473:   */
474:   for (i=1; i<localsize-1; i++) copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

476:   /*
477:      Restore vectors
478:   */
479:   VecRestoreArrayRead(local_in,&localptr);
480:   VecRestoreArray(localwork,&copyptr);

482:   /*
483:      Insert values from the local OUTPUT vector into the global
484:      output vector
485:   */
486:   DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
487:   DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

489:   /* Print debugging information if desired */
490:   if (appctx->debug) {
491:     PetscPrintf(appctx->comm,"RHS function vector\n");
492:     VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
493:   }

495:   return 0;
496: }
497: /* --------------------------------------------------------------------- */
498: /*
499:    RHSJacobian - User-provided routine to compute the Jacobian of
500:    the nonlinear right-hand-side function of the ODE.

502:    Input Parameters:
503:    ts - the TS context
504:    t - current time
505:    global_in - global input vector
 506:    ctx - optional user-defined context, as set by TSSetRHSJacobian()

508:    Output Parameters:
509:    AA - Jacobian matrix
510:    BB - optionally different preconditioning matrix

513:   Notes:
514:   RHSJacobian computes entries for the locally owned part of the Jacobian.
515:    - Currently, all PETSc parallel matrix formats are partitioned by
516:      contiguous chunks of rows across the processors.
517:    - Each processor needs to insert only elements that it owns
518:      locally (but any non-local elements will be sent to the
519:      appropriate processor during matrix assembly).
520:    - Always specify global row and columns of matrix entries when
521:      using MatSetValues().
522:    - Here, we set all entries for a particular row at once.
523:    - Note that MatSetValues() uses 0-based row and column numbers
524:      in Fortran as well as in C.
525: */
526: PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
527: {
528:   AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
529:   Vec               local_in = appctx->u_local;   /* local ghosted input vector */
530:   DM                da       = appctx->da;        /* distributed array */
531:   PetscScalar       v[3],sc;
532:   const PetscScalar *localptr;
533:   PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

535:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
536:      Get ready for local Jacobian computations
537:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
538:   /*
539:      Scatter ghost points to local vector, using the 2-step process
540:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
541:      By placing code between these two statements, computations can be
542:      done while messages are in transition.
543:   */
544:   DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
545:   DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

547:   /*
548:      Get pointer to vector data
549:   */
550:   VecGetArrayRead(local_in,&localptr);

552:   /*
553:      Get starting and ending locally owned rows of the matrix
554:   */
555:   MatGetOwnershipRange(BB,&mstarts,&mends);
556:   mstart = mstarts; mend = mends;

558:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
559:      Compute entries for the locally owned part of the Jacobian.
560:       - Currently, all PETSc parallel matrix formats are partitioned by
561:         contiguous chunks of rows across the processors.
562:       - Each processor needs to insert only elements that it owns
563:         locally (but any non-local elements will be sent to the
564:         appropriate processor during matrix assembly).
565:       - Here, we set all entries for a particular row at once.
 566:       - We can set matrix entries using either
567:         MatSetValuesLocal() or MatSetValues().
568:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

570:   /*
571:      Set matrix rows corresponding to boundary data
572:   */
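       /*
          Since the boundary values satisfy u_t = g_t(t), which does not depend
          on u, the corresponding Jacobian rows contain only zeros.
       */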
573:   if (mstart == 0) {
574:     v[0] = 0.0;
575:     MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES);
576:     mstart++;
577:   }
578:   if (mend == appctx->m) {
579:     mend--;
580:     v[0] = 0.0;
581:     MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES);
582:   }

584:   /*
585:      Set matrix rows corresponding to interior data.  We construct the
586:      matrix one row at a time.
587:   */
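       /*
          For an interior node, F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i)
          with sc = 1/(2*(t+1)^2*h^2), so the three nonzero Jacobian entries
          in row i, set below, are

             dF_i/du_{i-1} = sc*u_i
             dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
             dF_i/du_{i+1} = sc*u_i
       */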
588:   sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
589:   for (i=mstart; i<mend; i++) {
590:     idx[0] = i-1; idx[1] = i; idx[2] = i+1;
591:     is     = i - mstart + 1;
592:     v[0]   = sc*localptr[is];
593:     v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
594:     v[2]   = sc*localptr[is];
595:     MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES);
596:   }

598:   /*
599:      Restore vector
600:   */
601:   VecRestoreArrayRead(local_in,&localptr);

603:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
604:      Complete the matrix assembly process and set some options
605:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
606:   /*
607:      Assemble matrix, using the 2-step process:
608:        MatAssemblyBegin(), MatAssemblyEnd()
609:      Computations can be done while messages are in transition
610:      by placing code between these two statements.
611:   */
612:   MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
613:   MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
614:   if (BB != AA) {
615:     MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY);
616:     MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY);
617:   }

619:   /*
 620:      Set an option to indicate that we will never add a new nonzero location
621:      to the matrix. If we do, it will generate an error.
622:   */
623:   MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

625:   return 0;
626: }

628: /*TEST

630:     test:
631:       args: -nox -ts_dt 10 -mymonitor
632:       nsize: 2
633:       requires: !single

635:     test:
636:       suffix: tut_1
637:       nsize: 1
638:       args: -ts_max_steps 10 -ts_monitor

640:     test:
641:       suffix: tut_2
642:       nsize: 4
643:       args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

645:     test:
646:       suffix: tut_3
647:       nsize: 4
 648:       args: -ts_max_steps 10 -ts_monitor -M 128

650: TEST*/