Actual source code: ex50.c
static char help[] = "Solves the one-dimensional Burgers equation and compares with the exact solution\n\n";

/*
   Not yet tested in parallel
*/
/*
   Concepts: TS^time-dependent nonlinear problems
   Concepts: TS^Burgers equation
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the one-dimensional Burgers equation
       u_t = mu*u_xx - u u_x,
   on the domain 0 <= x <= L (L = 4 by default), with periodic boundary conditions

   The operators are discretized with the spectral element method

   See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
   by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
   used

   See src/tao/unconstrained/tutorials/burgers_spectral.c

  ------------------------------------------------------------------------- */

#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt  n;        /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

typedef struct {
  PetscInt  N;              /* grid points per element */
  PetscInt  E;              /* number of elements */
  PetscReal tol_L2,tol_max; /* error norms */
  PetscInt  steps;          /* number of timesteps */
  PetscReal Tend;           /* end time */
  PetscReal mu;             /* viscosity */
  PetscReal L;              /* total length of domain */
  PetscReal Le;
  PetscReal Tadj;
} PetscParam;

typedef struct {
  Vec grid;     /* total grid */
  Vec curr_sol;
} PetscData;

typedef struct {
  Vec      grid;  /* total grid */
  Vec      mass;  /* mass matrix for total integration */
  Mat      stiff; /* stiffness matrix */
  Mat      keptstiff;
  Mat      grad;
  PetscGLL gll;
} PetscSEMOperators;

typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode TrueSolution(TS,PetscReal,Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt  size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);

  /* initialize parameters */
  appctx.param.N     = 10;   /* number of GLL nodes per element */
  appctx.param.E     = 10;   /* number of elements */
  appctx.param.L     = 4.0;  /* length of the domain */
  appctx.param.mu    = 0.01; /* diffusion coefficient */
  appctx.initial_dt  = 5e-3;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 4;

  PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL);
  PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL);
  PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL);
  appctx.param.Le = appctx.param.L/appctx.param.E;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
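
  /*
     Example usage (a sketch; the executable name and option values are illustrative,
     and any PETSc runtime options may be appended):
         ./ex50 -E 20 -N 8 -Tend 2 -mu 0.01 -ts_monitor
     -N and -E set the GLL nodes per element and the number of elements, -Tend the
     final time and -mu the viscosity; the TEST block at the end of this file lists
     the option combinations that are actually tested.
  */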

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights);
  PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob            = appctx.param.E*(appctx.param.N-1);
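
  /*
     Each element carries N GLL nodes; adjacent elements share an endpoint and the
     periodic boundary identifies the last node with the first, so the global grid
     has E*(N-1) distinct points.
  */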

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are E*(N-1)
     total grid values spread as equally as possible among the processors
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&appctx.dat.curr_sol);
  VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.grid);
  VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.mass);

  DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);
  DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
  DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);

  /* Compute function over the locally owned part of the grid */

  xs = xs/(appctx.param.N-1);
  xm = xm/(appctx.param.N-1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */
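  /*
     Each grid point ind = i*(N-1)+j receives the GLL weight of node j scaled by the
     element Jacobian Le/2.  The first node of an element (j==0) is shared with the
     previous element, whose last-node contribution is never visited by the loop;
     because the GLL weights are symmetric (weights[0] == weights[N-1]), adding
     weights[0] a second time supplies that missing contribution.
  */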
  for (i=xs; i<xs+xm; i++) {
    for (j=0; j<appctx.param.N-1; j++) {
      x             = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
      ind           = i*(appctx.param.N-1)+j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5*appctx.param.Le*appctx.SEMop.gll.weights[j];
      if (j==0) wrk_ptr2[ind] += .5*appctx.param.Le*appctx.SEMop.gll.weights[j];
    }
  }
  DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
  DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE);
  DMCreateMatrix(appctx.da,&appctx.SEMop.stiff);
  DMCreateMatrix(appctx.da,&appctx.SEMop.grad);
  /*
     Assemble the (time-independent) Laplacian and gradient operators once; they are
     reused by RHSFunction() and RHSJacobian() below.
  */
  RHSMatrixLaplaciangllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx);
  RHSMatrixAdvectiongllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.grad,appctx.SEMop.grad,&appctx);

  MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff);

  /* attach the null space to the matrix, this probably is not needed but does no harm */
  MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
  MatSetNullSpace(appctx.SEMop.stiff,nsp);
  MatSetNullSpace(appctx.SEMop.keptstiff,nsp);
  MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL);
  MatNullSpaceDestroy(&nsp);
  /* attach the null space to the matrix, this probably is not needed but does no harm */
  MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
  MatSetNullSpace(appctx.SEMop.grad,nsp);
  MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL);
  MatNullSpaceDestroy(&nsp);

  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  TSCreate(PETSC_COMM_WORLD,&appctx.ts);
  TSSetProblemType(appctx.ts,TS_NONLINEAR);
  TSSetType(appctx.ts,TSRK);
  TSSetDM(appctx.ts,appctx.da);
  TSSetTime(appctx.ts,0.0);
  TSSetTimeStep(appctx.ts,appctx.initial_dt);
  TSSetMaxSteps(appctx.ts,appctx.param.steps);
  TSSetMaxTime(appctx.ts,appctx.param.Tend);
  TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);
  TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL);
  TSSetSaveTrajectory(appctx.ts);
  TSSetFromOptions(appctx.ts);
  TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);
  TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx);

  /* Set initial conditions for the problem */
  TrueSolution(appctx.ts,0,appctx.dat.curr_sol,&appctx);

  TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void *))TrueSolution,&appctx);
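
  /*
     Providing the exact solution through TSSetSolutionFunction() lets TS report the
     error against it at every step when run with -ts_monitor_error, as in the tests
     at the end of this file.
  */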
  TSSetTime(appctx.ts,0.0);
  TSSetStepNumber(appctx.ts,0);

  TSSolve(appctx.ts,appctx.dat.curr_sol);

  MatDestroy(&appctx.SEMop.stiff);
  MatDestroy(&appctx.SEMop.keptstiff);
  MatDestroy(&appctx.SEMop.grad);
  VecDestroy(&appctx.SEMop.grid);
  VecDestroy(&appctx.SEMop.mass);
  VecDestroy(&appctx.dat.curr_sol);
  PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);
  DMDestroy(&appctx.da);
  TSDestroy(&appctx.ts);

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}

/*
   TrueSolution() computes the exact solution for the PDE

   Input Parameters:
   ts - the TS context (unused)
   t  - current time
   u  - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with the exact solution at time t (global)
*/
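/*
   The formula evaluated below is
       u(x,t) = 2 mu pi sin(pi x) exp(-mu pi^2 t) / (2 + cos(pi x) exp(-mu pi^2 t)),
   a Cole-Hopf type solution u = -2 mu phi_x/phi with phi = 2 + cos(pi x) exp(-mu pi^2 t),
   so it satisfies u_t = mu u_xx - u u_x exactly and is periodic in x.
*/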
PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt          i,xs,xn;

  DMDAVecGetArray(appctx->da,u,&s);
  DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
  for (i=xs; i<xs+xn; i++) {
    s[i] = 2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t)/(2.0+PetscCosScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t));
  }
  DMDAVecRestoreArray(appctx->da,u,&s);
  DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  return 0;
}
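
/*
   RHSFunction - evaluates the right-hand side F(u) = -u .* (grad*u) + keptstiff*u,
   i.e. the nonlinear advection term plus the viscous term assembled once in
   RHSMatrixLaplaciangllDM(); see the calls below.
*/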
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;

  MatMult(appctx->SEMop.grad,globalin,globalout); /* grad u */
  VecPointwiseMult(globalout,globalin,globalout); /* u grad u */
  VecScale(globalout, -1.0);
  MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout);
  return 0;
}

/*

   K is the discretization of the Laplacian
   G is the discretization of the gradient

   Computes Jacobian of  K u + diag(u) G u   which is given by
              K + diag(u)G + diag(Gu)
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;
  Vec    Gglobalin;

  /* A = diag(u) G */

  MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN);
  MatDiagonalScale(A,globalin,NULL);

  /* A = A + diag(Gu) */
  VecDuplicate(globalin,&Gglobalin);
  MatMult(appctx->SEMop.grad,globalin,Gglobalin);
  MatDiagonalSet(A,Gglobalin,ADD_VALUES);
  VecDestroy(&Gglobalin);

  /* A = K - A */
  MatScale(A,-1.0);
  MatAXPY(A,0.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN);
  return 0;
}

/* --------------------------------------------------------------------- */

#include "petscblaslapack.h"
/*
   Matrix-free application of the 1D Laplacian and gradient operators for GLL spectral elements
*/
PetscErrorCode MatMult_Laplacian(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscReal         **temp,vv;
  PetscInt          i,j,xs,xn;
  Vec               xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt      _One = 1,n;
  PetscScalar       _DOne = 1;

  MatShellGetContext(A,&appctx);
  DMGetLocalVector(appctx->da,&xlocal);
  DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);
  DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);
  DMGetLocalVector(appctx->da,&ylocal);
  VecSet(ylocal,0.0);
  PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  for (i=0; i<appctx->param.N; i++) {
    vv = -appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j] = temp[i][j]*vv;
  }
  DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);
  DMDAVecGetArray(appctx->da,ylocal,&yl);
  DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
  PetscBLASIntCast(appctx->param.N,&n);
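  /*
     Apply the dense NxN element matrix to each locally owned element with BLASgemv:
     successive elements overlap by one node (hence the stride of N-1 through the
     local array) and beta = 1 accumulates the contributions at shared endpoints.
  */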
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);
  DMDAVecRestoreArray(appctx->da,ylocal,&yl);
  PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  VecSet(y,0.0);
  DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);
  DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);
  DMRestoreLocalVector(appctx->da,&xlocal);
  DMRestoreLocalVector(appctx->da,&ylocal);
  VecPointwiseDivide(y,y,appctx->SEMop.mass);
  return 0;
}

PetscErrorCode MatMult_Advection(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscReal         **temp;
  PetscInt          j,xs,xn;
  Vec               xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt      _One = 1,n;
  PetscScalar       _DOne = 1;

  MatShellGetContext(A,&appctx);
  DMGetLocalVector(appctx->da,&xlocal);
  DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);
  DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);
  DMGetLocalVector(appctx->da,&ylocal);
  VecSet(ylocal,0.0);
  PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);
  DMDAVecGetArray(appctx->da,ylocal,&yl);
  DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
  PetscBLASIntCast(appctx->param.N,&n);
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);
  DMDAVecRestoreArray(appctx->da,ylocal,&yl);
  PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  VecSet(y,0.0);
  DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);
  DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);
  DMRestoreLocalVector(appctx->da,&xlocal);
  DMRestoreLocalVector(appctx->da,&ylocal);
  VecPointwiseDivide(y,y,appctx->SEMop.mass);
  VecScale(y,-1.0);
  return 0;
}

/*
   RHSMatrixLaplacian - User-provided routine to compute the right-hand-side
   matrix for the Laplacian operator

   Input Parameters:
   ts - the TS context
   t - current time  (ignored)
   X - current solution  (ignored)
   dummy - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
   str - flag indicating matrix structure

*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  PetscReal vv;
  AppCtx    *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscInt  i,xs,xn,l,j;
  PetscInt  *rowsDM;
  PetscBool flg = PETSC_FALSE;

  PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);

  if (!flg) {
    /*
       Creates the element stiffness matrix for the given gll
    */
    PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
    /* workaround for clang analyzer warning: Division by zero */

    /* scale by the size of the element */
    for (i=0; i<appctx->param.N; i++) {
      vv = -appctx->param.mu*2.0/appctx->param.Le;
      for (j=0; j<appctx->param.N; j++) temp[i][j] = temp[i][j]*vv;
    }

    MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
    DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);

    xs = xs/(appctx->param.N-1);
    xn = xn/(appctx->param.N-1);

    PetscMalloc1(appctx->param.N,&rowsDM);
    /*
       loop over local elements
    */
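    /*
       MatSetValuesLocal() takes indices in the local (ghosted) ordering; with a
       periodic DMDA and stencil width 1 the first locally owned node has local
       index 1 (index 0 is the ghost node to its left), hence the offset of 1 in
       rowsDM below.
    */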
    for (j=xs; j<xs+xn; j++) {
      for (l=0; l<appctx->param.N; l++) {
        rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
      }
      MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
    }
    PetscFree(rowsDM);
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
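    /*
       Left-scale the assembled matrix by the inverse of the diagonal mass vector:
       the Reciprocal / DiagonalScale / Reciprocal sequence below leaves SEMop.mass
       unchanged, and afterwards A represents M^{-1} K so no separate mass solve is
       needed in RHSFunction().
    */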
    VecReciprocal(appctx->SEMop.mass);
    MatDiagonalScale(A,appctx->SEMop.mass,0);
    VecReciprocal(appctx->SEMop.mass);

    PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  } else {
    MatSetType(A,MATSHELL);
    MatSetUp(A);
    MatShellSetContext(A,appctx);
    MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Laplacian);
  }
  return 0;
}

/*
   RHSMatrixAdvection - User-provided routine to compute the right-hand-side
   matrix for the Advection (gradient) operator.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   dummy - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure

*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  AppCtx    *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscInt  xs,xn,l,j;
  PetscInt  *rowsDM;
  PetscBool flg = PETSC_FALSE;

  PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);

  if (!flg) {
    /*
       Creates the advection matrix for the given gll
    */
    PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
    MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
    DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
    xs = xs/(appctx->param.N-1);
    xn = xn/(appctx->param.N-1);

    PetscMalloc1(appctx->param.N,&rowsDM);
    for (j=xs; j<xs+xn; j++) {
      for (l=0; l<appctx->param.N; l++) {
        rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
      }
      MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
    }
    PetscFree(rowsDM);
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

    VecReciprocal(appctx->SEMop.mass);
    MatDiagonalScale(A,appctx->SEMop.mass,0);
    VecReciprocal(appctx->SEMop.mass);

    PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  } else {
    MatSetType(A,MATSHELL);
    MatSetUp(A);
    MatShellSetContext(A,appctx);
    MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Advection);
  }
  return 0;
}
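
/*
   With the -gll_mf option both operator routines skip assembly and instead wrap the
   element-wise kernels MatMult_Laplacian() / MatMult_Advection() in MATSHELL
   matrices; test 3 below exercises this matrix-free path.
*/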

/*TEST

    build:
      requires: !complex

    test:
      suffix: 1
      requires: !single

    test:
      suffix: 2
      nsize: 5
      requires: !single

    test:
      suffix: 3
      requires: !single
      args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error

    test:
      suffix: 4
      requires: !single
      args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error

TEST*/