Actual source code: ex21.c


static char help[] ="Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug     : Activate debugging printouts\n\
  -nox       : Deactivate x-window graphics\n\
  -ul        : lower bound\n\
  -uh        : upper bound\n\
  -mymonitor : Activate the user-defined monitoring routine\n\n";

/*
   Concepts: TS^time-dependent nonlinear problems
   Concepts: TS^Variational inequality nonlinear solver
   Processors: n
*/

/* ------------------------------------------------------------------------

   This is a variation of ex2.c to solve the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    with box constraints on the interior grid points
    ul <= u(t,x) <= uh with x != 0,1
    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */
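
/*
    As a quick check, the exact solution above indeed satisfies the PDE:
         u_t  = 1 + x*x   and   u_xx = 2*(1 + t),
    so
         u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t) * 2*(1 + t) / (2*(t+1)^2)
                                = 1 + x*x
                                = u_t,
    and it matches the boundary and initial conditions above.
*/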

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" so that we can use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator */
  DM        da;             /* distributed array data structure */
  Vec       localwork;      /* local ghosted work vector */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global exact solution vector */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
  PetscBool debug;          /* PETSC_TRUE activates debugging printouts */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  Vec            r;                      /* residual vector */
  PetscInt       time_steps_max = 1000;  /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  Vec            xl,xu;                  /* lower and upper bounds on variables */
  PetscScalar    ul = 0.0,uh = 3.0;
  PetscBool      mymonitor;
  PetscReal      bounds[] = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
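  /*
     Fix the y-axis limits used by the default draw viewer to [1.0, 3.3],
     so the plot of the solution does not rescale as the solution evolves.
  */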
  PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL);
  PetscOptionsGetScalar(NULL,NULL,"-ul",&ul,NULL);
  PetscOptionsGetScalar(NULL,NULL,"-uh",&uh,NULL);
  PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
  PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor);
  appctx.h    = 1.0/(appctx.m-1.0);
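
  /*
     For example, a (hypothetical) invocation that runs on two processes with
     a finer grid, tighter bounds, and no x-window graphics:
         mpiexec -n 2 ./ex21 -M 100 -ul 0.5 -uh 2.5 -mymonitor -nox
  */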

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* Create residual vector */
  VecDuplicate(u,&r);
  /* Create lower and upper bound vectors */
  VecDuplicate(u,&xl);
  VecDuplicate(u,&xu);
  SetBounds(xl,xu,ul,uh,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,r,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    TSMonitorSet(ts,Monitor,&appctx,NULL);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  TSSetTimeStep(ts,dt);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the backward Euler method.
       - Set timestepping duration info.
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     overrides the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  /* Set lower and upper bound on the solution vector for each time step */
  TSVISetVariableBounds(ts,xl,xu);
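  /*
     Note that these bounds are enforced by the nonlinear (SNES) solve inside
     each time step, so a variational-inequality-capable solver must be
     selected at runtime, e.g. -snes_type vinewtonrsls as in the test at the
     bottom of this file.
  */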
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  VecDestroy(&r);
  VecDestroy(&xl);
  VecDestroy(&xu);
  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Initial condition vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}

/* --------------------------------------------------------------------- */
/*
  SetBounds - Sets the lower and upper bounds on the interior points.
  The two boundary points are given infinite bounds so that the Dirichlet
  boundary values are never clipped.

  Input parameters:
  xl - vector of lower bounds
  xu - vector of upper bounds
  ul - constant lower bound for all interior variables
  uh - constant upper bound for all interior variables
  appctx - application context
 */
PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh,AppCtx *appctx)
{
  PetscScalar       *l,*u;
  PetscMPIInt       rank,size;
  PetscInt          localsize;

  VecSet(xl,ul);
  VecSet(xu,uh);
  VecGetLocalSize(xl,&localsize);
  VecGetArray(xl,&l);
  VecGetArray(xu,&u);

  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (rank == 0) {
    l[0] = -PETSC_INFINITY;
    u[0] =  PETSC_INFINITY;
  }
  if (rank == size-1) {
    l[localsize-1] = -PETSC_INFINITY;
    u[localsize-1] =  PETSC_INFINITY;
  }
  VecRestoreArray(xl,&l);
  VecRestoreArray(xu,&u);
  return 0;
}

/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      en2,en2s,enmax;
  PetscDraw      draw;

  /*
     We use the default X windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));

  /*
     Compute the exact solution at this timestep
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&en2);
  en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the 2-norm by the grid spacing so it approximates the continuous L2 norm */
  VecNorm(appctx->solution,NORM_MAX,&enmax);

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %g, 2-norm error = %g, max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax);

  /*
     Print debugging information if desired
  */
  /*  if (appctx->debug) {
     PetscPrintf(appctx->comm,"Error vector\n");
     VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
   } */
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
      Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in,&localptr);

  /*
      Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
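  /*
     The scaling factor sc just computed combines the 1/(2*(t+1)^2) factor of
     the PDE with the 1/h^2 factor of the centered second difference below,
     so that
         F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i)
     approximates u * u_xx / (2*(t+1)^2) at the interior nodes.
  */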

  /*
      Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size-1) copyptr[localsize-1] = (t < 0.5) ? 2.0 : 0.0;
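  /*
     Note: for t >= 0.5 the right-boundary time derivative is set to 0 rather
     than 2, so the boundary value stops growing; presumably this is done so
     that the upper bound uh actually becomes active.
  */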

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  /* Print debugging information if desired */
  /*  if (appctx->debug) {
     PetscPrintf(appctx->comm,"RHS function vector\n");
     VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
   } */

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   B - optionally different matrix from which the preconditioner is built

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat B,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;      /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(B,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
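  /*
     The interior right-hand-side function is
         F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),
     so each row has three nonzero Jacobian entries:
         dF_i/du_{i-1} = sc * u_i
         dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc * u_i
     which explains the factor 4.0 in v[1] below.
  */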
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES);
  }

  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}

/*TEST

    test:
      args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
      requires: !single

TEST*/