Actual source code: ex73.c
/*T
   Concepts: KSP^solving a system of linear equations
   Concepts: KSP^Laplacian, 2d
   Processors: n
T*/

/*
This example was derived from src/ksp/ksp/tutorials/ex29.c

Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

   -div \rho grad u = f,  0 < x,y < 1,

with forcing function

   f = e^{-x^2/\nu} e^{-y^2/\nu}

with Dirichlet boundary conditions

   u = f(x,y) for x = 0, x = 1, y = 0, y = 1

or pure Neumann boundary conditions.
*/
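
/*
  Discretization note: ComputeMatrix_DMDA() below assembles the standard
  5-point finite-difference stencil on the uniform mx x my grid with
  spacings Hx = 1/(mx-1), Hy = 1/(my-1). At an interior node (i,j), with
  rho = rho(i,j) supplied by ComputeRho(), the discrete equation reads

     2 rho (Hx/Hy + Hy/Hx) u_{i,j}
       - rho (Hx/Hy) (u_{i,j-1} + u_{i,j+1})
       - rho (Hy/Hx) (u_{i-1,j} + u_{i+1,j}) = Hx Hy f_{i,j}

  with the Hx*Hy scaling of the right-hand side applied in ComputeRHS_DMDA().
*/
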
static char help[] = "Solves the 2D inhomogeneous Laplacian. Demonstrates using the PCTelescopeSetUseCoarseDM() functionality of PCTelescope via a DMShell\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

PetscErrorCode ComputeMatrix_ShellDA(KSP,Mat,Mat,void*);
PetscErrorCode ComputeMatrix_DMDA(DM,Mat,Mat,void*);
PetscErrorCode ComputeRHS_DMDA(DM,Vec,void*);
PetscErrorCode DMShellCreate_ShellDA(DM,DM*);
PetscErrorCode DMFieldScatter_ShellDA(DM,Vec,ScatterMode,DM,Vec);
PetscErrorCode DMStateScatter_ShellDA(DM,ScatterMode,DM);

typedef enum {DIRICHLET, NEUMANN} BCType;

typedef struct {
  PetscReal rho;
  PetscReal nu;
  BCType    bcType;
  MPI_Comm  comm;
} UserContext;

PetscErrorCode UserContextCreate(MPI_Comm comm,UserContext **ctx)
{
  UserContext *user;
  const char  *bcTypes[2] = {"dirichlet","neumann"};
  PetscInt    bc;

  PetscCalloc1(1,&user);
  user->comm = comm;
  PetscOptionsBegin(comm, "", "Options for the inhomogeneous Poisson equation", "DMqq");
  user->rho = 1.0;
  PetscOptionsReal("-rho", "The conductivity", "ex29.c", user->rho, &user->rho, NULL);
  user->nu = 0.1;
  PetscOptionsReal("-nu", "The width of the Gaussian source", "ex29.c", user->nu, &user->nu, NULL);
  bc = (PetscInt)DIRICHLET;
  PetscOptionsEList("-bc_type","Type of boundary condition","ex29.c",bcTypes,2,bcTypes[0],&bc,NULL);
  user->bcType = (BCType)bc;
  PetscOptionsEnd();
  *ctx = user;
  return 0;
}
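
/*
  The options registered above can be set on the command line, for example
  (illustrative invocation):

    mpiexec -n 4 ./ex73 -tid 0 -rho 10.0 -nu 0.05 -bc_type neumann
*/
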
PetscErrorCode CommCoarsen(MPI_Comm comm,PetscInt number,PetscSubcomm *p)
{
  PetscSubcomm psubcomm;

  PetscSubcommCreate(comm,&psubcomm);
  PetscSubcommSetNumber(psubcomm,number);
  PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_INTERLACED);
  *p = psubcomm;
  return 0;
}
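
/*
  CommCoarsen() splits comm into `number` interlaced sub-communicators;
  throughout this example only the child with color == 0 carries the next,
  coarser level. For instance, splitting an 8-rank communicator with
  number = 2 produces children of 4 ranks each.
*/
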
PetscErrorCode CommHierarchyCreate(MPI_Comm comm,PetscInt n,PetscInt number[],PetscSubcomm pscommlist[])
{
  PetscInt  k;
  PetscBool view_hierarchy = PETSC_FALSE;

  for (k=0; k<n; k++) {
    pscommlist[k] = NULL;
  }

  if (n < 1) return 0;

  CommCoarsen(comm,number[n-1],&pscommlist[n-1]);
  for (k=n-2; k>=0; k--) {
    MPI_Comm comm_k = PetscSubcommChild(pscommlist[k+1]);
    if (pscommlist[k+1]->color == 0) {
      CommCoarsen(comm_k,number[k],&pscommlist[k]);
    }
  }

  PetscOptionsGetBool(NULL,NULL,"-view_hierarchy",&view_hierarchy,NULL);
  if (view_hierarchy) {
    PetscMPIInt size;

    MPI_Comm_size(comm,&size);
    PetscPrintf(comm,"level[%D] size %d\n",n,(int)size);
    for (k=n-1; k>=0; k--) {
      if (pscommlist[k]) {
        MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);

        if (pscommlist[k]->color == 0) {
          MPI_Comm_size(comm_k,&size);
          PetscPrintf(comm_k,"level[%D] size %d\n",k,(int)size);
        }
      }
    }
  }
  return 0;
}
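
/*
  The hierarchy is built from the finest level downwards: level n-1 splits
  comm, and each subsequent level splits the color-0 child of the level
  above it. On 16 ranks with number[] = {2,2}, for example, the
  communicator sizes are 16 -> 8 -> 4 (run with -view_hierarchy to print
  them).
*/
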
/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c
   Given a global grid index (i,j), determine which rank of the Mp x Np
   process grid owns it, using the per-dimension ownership ranges. */
static PetscErrorCode _DMDADetermineRankFromGlobalIJ_2d(PetscInt i,PetscInt j,PetscInt Mp,PetscInt Np,
                                                        PetscInt start_i[],PetscInt start_j[],
                                                        PetscInt span_i[],PetscInt span_j[],
                                                        PetscMPIInt *_pi,PetscMPIInt *_pj,PetscMPIInt *rank_re)
{
  PetscInt pi,pj,n;

  *rank_re = -1;
  pi = pj = -1;
  if (_pi) {
    for (n=0; n<Mp; n++) {
      if ((i >= start_i[n]) && (i < start_i[n]+span_i[n])) {
        pi = n;
        break;
      }
    }
    *_pi = (PetscMPIInt)pi;
  }

  if (_pj) {
    for (n=0; n<Np; n++) {
      if ((j >= start_j[n]) && (j < start_j[n]+span_j[n])) {
        pj = n;
        break;
      }
    }
    *_pj = (PetscMPIInt)pj;
  }

  *rank_re = (PetscMPIInt)(pi + pj * Mp);
  return 0;
}
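
/*
  Worked example: on a 2 x 2 process grid over an 8 x 8 mesh with
  start_i = start_j = {0,4} and span_i = span_j = {4,4}, the point
  (i,j) = (5,1) falls in process column 1 and row 0, hence
  rank_re = 1 + 0*2 = 1.
*/
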
/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c
   Compute the global index of the first entry owned by rank_re, i.e. the
   sum of the subdomain sizes of all ranks that precede it. */
static PetscErrorCode _DMDADetermineGlobalS0_2d(PetscMPIInt rank_re,PetscInt Mp_re,PetscInt Np_re,
                                                PetscInt range_i_re[],PetscInt range_j_re[],PetscInt *s0)
{
  PetscInt    i,j,start_IJ = 0;
  PetscMPIInt rank_ij;

  *s0 = -1;
  for (j=0; j<Np_re; j++) {
    for (i=0; i<Mp_re; i++) {
      rank_ij = (PetscMPIInt)(i + j*Mp_re);
      if (rank_ij < rank_re) {
        start_IJ += range_i_re[i]*range_j_re[j];
      }
    }
  }
  *s0 = start_IJ;
  return 0;
}
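
/*
  Continuing the 2 x 2 example above with range_i_re = range_j_re = {4,4}:
  for rank_re = 3, ranks 0, 1 and 2 each own 4*4 = 16 entries, so s0 = 48.
*/
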
/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode DMDACreatePermutation_2d(DM dmrepart,DM dmf,Mat *mat)
{
  PetscInt       k,sum,Mp_re = 0,Np_re = 0;
  PetscInt       nx,ny,sr,er,Mr,ndof;
  PetscInt       i,j,location,startI[2],endI[2],lenI[2];
  const PetscInt *_range_i_re = NULL,*_range_j_re = NULL;
  PetscInt       *range_i_re,*range_j_re;
  PetscInt       *start_i_re,*start_j_re;
  MPI_Comm       comm;
  Vec            V;
  Mat            Pscalar;

  PetscInfo(dmf,"setting up the permutation matrix (DMDA-2D)\n");
  PetscObjectGetComm((PetscObject)dmf,&comm);

  _range_i_re = _range_j_re = NULL;
  /* Query the repartitioned DMDA; it is defined only on the child communicator */
  if (dmrepart) {
    DMDAGetInfo(dmrepart,NULL,NULL,NULL,NULL,&Mp_re,&Np_re,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
    DMDAGetOwnershipRanges(dmrepart,&_range_i_re,&_range_j_re,NULL);
  }

  /* note - assume rank 0 always participates */
  MPI_Bcast(&Mp_re,1,MPIU_INT,0,comm);
  MPI_Bcast(&Np_re,1,MPIU_INT,0,comm);

  PetscCalloc1(Mp_re,&range_i_re);
  PetscCalloc1(Np_re,&range_j_re);

  if (_range_i_re) PetscArraycpy(range_i_re,_range_i_re,Mp_re);
  if (_range_j_re) PetscArraycpy(range_j_re,_range_j_re,Np_re);

  MPI_Bcast(range_i_re,Mp_re,MPIU_INT,0,comm);
  MPI_Bcast(range_j_re,Np_re,MPIU_INT,0,comm);

  PetscMalloc1(Mp_re,&start_i_re);
  PetscMalloc1(Np_re,&start_j_re);

  sum = 0;
  for (k=0; k<Mp_re; k++) {
    start_i_re[k] = sum;
    sum += range_i_re[k];
  }

  sum = 0;
  for (k=0; k<Np_re; k++) {
    start_j_re[k] = sum;
    sum += range_j_re[k];
  }

  /* Create permutation */
  DMDAGetInfo(dmf,NULL,&nx,&ny,NULL,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);
  DMGetGlobalVector(dmf,&V);
  VecGetSize(V,&Mr);
  VecGetOwnershipRange(V,&sr,&er);
  DMRestoreGlobalVector(dmf,&V);
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  MatCreate(comm,&Pscalar);
  MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
  MatSetType(Pscalar,MATAIJ);
  MatSeqAIJSetPreallocation(Pscalar,1,NULL);
  MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

  DMDAGetCorners(dmf,NULL,NULL,NULL,&lenI[0],&lenI[1],NULL);
  DMDAGetCorners(dmf,&startI[0],&startI[1],NULL,&endI[0],&endI[1],NULL);
  endI[0] += startI[0];
  endI[1] += startI[1];

  for (j=startI[1]; j<endI[1]; j++) {
    for (i=startI[0]; i<endI[0]; i++) {
      PetscMPIInt rank_ijk_re,rank_reI[] = {0,0};
      PetscInt    s0_re;
      PetscInt    ii,jj,local_ijk_re,mapped_ijk;
      PetscInt    lenI_re[] = {0,0};

      location = (i - startI[0]) + (j - startI[1])*lenI[0];
      _DMDADetermineRankFromGlobalIJ_2d(i,j,Mp_re,Np_re,
                                        start_i_re,start_j_re,
                                        range_i_re,range_j_re,
                                        &rank_reI[0],&rank_reI[1],&rank_ijk_re);
      _DMDADetermineGlobalS0_2d(rank_ijk_re,Mp_re,Np_re,range_i_re,range_j_re,&s0_re);

      /* local (ii,jj) within the owning repartitioned subdomain */
      ii = i - start_i_re[ rank_reI[0] ];
      if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error ii");
      jj = j - start_j_re[ rank_reI[1] ];
      if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error jj");

      lenI_re[0] = range_i_re[ rank_reI[0] ];
      lenI_re[1] = range_j_re[ rank_reI[1] ];
      local_ijk_re = ii + jj * lenI_re[0];
      mapped_ijk = s0_re + local_ijk_re;
      MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
    }
  }
  MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);

  *mat = Pscalar;

  PetscFree(range_i_re);
  PetscFree(range_j_re);
  PetscFree(start_i_re);
  PetscFree(start_j_re);
  return 0;
}
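
/*
  Row sr+location of Pscalar is the global index of the grid point (i,j)
  in the fine DMDA's ordering; column mapped_ijk is the index of the same
  point in the repartitioned ordering. P is thus a permutation between the
  two layouts: MatMultTranspose(P,x,xp) maps a fine-layout vector into the
  repartitioned layout and MatMult(P,xp,y) maps it back (see the field
  scatter routines below).
*/
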
/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode PCTelescopeSetUp_dmda_scatters(DM dmf,DM dmc)
{
  Vec        xred,yred,xtmp,x,xp;
  VecScatter scatter;
  IS         isin;
  PetscInt   m,bs,st,ed;
  MPI_Comm   comm;
  VecType    vectype;

  PetscObjectGetComm((PetscObject)dmf,&comm);
  DMCreateGlobalVector(dmf,&x);
  VecGetBlockSize(x,&bs);
  VecGetType(x,&vectype);

  /* cannot use VecDuplicate as xp is already composed with dmf */
  /*VecDuplicate(x,&xp);*/
  {
    PetscInt m,M;

    VecGetSize(x,&M);
    VecGetLocalSize(x,&m);
    VecCreate(comm,&xp);
    VecSetSizes(xp,m,M);
    VecSetBlockSize(xp,bs);
    VecSetType(xp,vectype);
  }

  m    = 0;
  xred = NULL;
  yred = NULL;
  if (dmc) {
    DMCreateGlobalVector(dmc,&xred);
    VecDuplicate(xred,&yred);
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,ed-st,st,1,&isin);
    VecGetLocalSize(xred,&m);
  } else {
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,vectype);
  VecScatterCreate(x,isin,xtmp,NULL,&scatter);

  PetscObjectCompose((PetscObject)dmf,"isin",(PetscObject)isin);
  PetscObjectCompose((PetscObject)dmf,"scatter",(PetscObject)scatter);
  PetscObjectCompose((PetscObject)dmf,"xtmp",(PetscObject)xtmp);
  PetscObjectCompose((PetscObject)dmf,"xp",(PetscObject)xp);

  VecDestroy(&xred);
  VecDestroy(&yred);
  VecDestroy(&x);
  return 0;
}
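
/*
  After this setup the fine DMDA carries four composed objects:
    "xp"      - work vector on the fine communicator holding the permuted field
    "isin"    - index set of the entries of xp owned by the coarse vector
    "scatter" - VecScatter pulling those entries of xp into xtmp
    "xtmp"    - receive buffer, non-empty only on ranks where dmc exists
  They are queried by the field scatter routines and released in
  DMDestroyShellDMDA().
*/
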
PetscErrorCode DMCreateMatrix_ShellDA(DM dm,Mat *A)
{
  DM          da;
  MPI_Comm    comm;
  PetscMPIInt size;
  UserContext *ctx = NULL;
  PetscInt    M,N;

  DMShellGetContext(dm,&da);
  PetscObjectGetComm((PetscObject)da,&comm);
  MPI_Comm_size(comm,&size);
  DMCreateMatrix(da,A);
  MatGetSize(*A,&M,&N);
  PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA (%D x %D)\n",(PetscInt)size,M,N);

  DMGetApplicationContext(dm,&ctx);
  if (ctx->bcType == NEUMANN) {
    MatNullSpace nullspace = NULL;
    PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: using neumann bcs\n",(PetscInt)size);

    MatGetNullSpace(*A,&nullspace);
    if (!nullspace) {
      PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: operator does not have nullspace - attaching\n",(PetscInt)size);
      /* the Neumann operator is singular; attach the constant null space */
      MatNullSpaceCreate(comm,PETSC_TRUE,0,0,&nullspace);
      MatSetNullSpace(*A,nullspace);
      MatNullSpaceDestroy(&nullspace);
    } else {
      PetscPrintf(comm,"[size %D] DMCreateMatrix_ShellDA: operator already has a nullspace\n",(PetscInt)size);
    }
  }
  return 0;
}

PetscErrorCode DMCreateGlobalVector_ShellDA(DM dm,Vec *x)
{
  DM da;

  DMShellGetContext(dm,&da);
  DMCreateGlobalVector(da,x);
  VecSetDM(*x,dm);
  return 0;
}

PetscErrorCode DMCreateLocalVector_ShellDA(DM dm,Vec *x)
{
  DM da;

  DMShellGetContext(dm,&da);
  DMCreateLocalVector(da,x);
  VecSetDM(*x,dm);
  return 0;
}

PetscErrorCode DMCoarsen_ShellDA(DM dm,MPI_Comm comm,DM *dmc)
{
  *dmc = NULL;
  DMGetCoarseDM(dm,dmc);
  if (!*dmc) {
    SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"The coarse DM should never be NULL. The DM hierarchy should have already been defined");
  } else {
    PetscObjectReference((PetscObject)(*dmc));
  }
  return 0;
}

PetscErrorCode DMCreateInterpolation_ShellDA(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  DM da1,da2;

  DMShellGetContext(dm1,&da1);
  DMShellGetContext(dm2,&da2);
  DMCreateInterpolation(da1,da2,mat,vec);
  return 0;
}

PetscErrorCode DMShellDASetUp_TelescopeDMScatter(DM dmf_shell,DM dmc_shell)
{
  Mat P = NULL;
  DM  dmf = NULL,dmc = NULL;

  DMShellGetContext(dmf_shell,&dmf);
  if (dmc_shell) {
    DMShellGetContext(dmc_shell,&dmc);
  }
  DMDACreatePermutation_2d(dmc,dmf,&P);
  PetscObjectCompose((PetscObject)dmf,"P",(PetscObject)P);
  PCTelescopeSetUp_dmda_scatters(dmf,dmc);
  PetscObjectComposeFunction((PetscObject)dmf_shell,"PCTelescopeFieldScatter",DMFieldScatter_ShellDA);
  PetscObjectComposeFunction((PetscObject)dmf_shell,"PCTelescopeStateScatter",DMStateScatter_ShellDA);
  return 0;
}
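
/*
  The names "PCTelescopeFieldScatter" and "PCTelescopeStateScatter" are the
  hooks PCTelescope looks up on the fine DM when its use_coarse_dm mode is
  enabled; composing them here is what allows a DMShell, rather than the
  built-in DMDA support, to drive the repartitioning.
*/
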
PetscErrorCode DMShellDAFieldScatter_Forward(DM dmf,Vec x,DM dmc,Vec xc)
{
  Mat               P = NULL;
  Vec               xp = NULL,xtmp = NULL;
  VecScatter        scatter = NULL;
  const PetscScalar *x_array;
  PetscInt          i,st,ed;

  PetscObjectQuery((PetscObject)dmf,"P",(PetscObject*)&P);
  PetscObjectQuery((PetscObject)dmf,"xp",(PetscObject*)&xp);
  PetscObjectQuery((PetscObject)dmf,"scatter",(PetscObject*)&scatter);
  PetscObjectQuery((PetscObject)dmf,"xtmp",(PetscObject*)&xtmp);

  /* permute the field into the repartitioned ordering */
  MatMultTranspose(P,x,xp);

  /* pull in vector x->xtmp */
  VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

  /* copy vector entries into xred */
  VecGetArrayRead(xtmp,&x_array);
  if (xc) {
    PetscScalar *LA_xred;
    VecGetOwnershipRange(xc,&st,&ed);

    VecGetArray(xc,&LA_xred);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    VecRestoreArray(xc,&LA_xred);
  }
  VecRestoreArrayRead(xtmp,&x_array);
  return 0;
}

PetscErrorCode DMShellDAFieldScatter_Reverse(DM dmf,Vec y,DM dmc,Vec yc)
{
  Mat         P = NULL;
  Vec         xp = NULL,xtmp = NULL;
  VecScatter  scatter = NULL;
  PetscScalar *array;
  PetscInt    i,st,ed;

  PetscObjectQuery((PetscObject)dmf,"P",(PetscObject*)&P);
  PetscObjectQuery((PetscObject)dmf,"xp",(PetscObject*)&xp);
  PetscObjectQuery((PetscObject)dmf,"scatter",(PetscObject*)&scatter);
  PetscObjectQuery((PetscObject)dmf,"xtmp",(PetscObject*)&xtmp);

  /* return vector */
  VecGetArray(xtmp,&array);
  if (yc) {
    const PetscScalar *LA_yred;
    VecGetOwnershipRange(yc,&st,&ed);
    VecGetArrayRead(yc,&LA_yred);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    VecRestoreArrayRead(yc,&LA_yred);
  }
  VecRestoreArray(xtmp,&array);
  VecScatterBegin(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
  /* permute back into the fine DMDA ordering */
  MatMult(P,xp,y);
  return 0;
}
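
/*
  The two field scatters are inverses of one another:

    forward: x --MatMultTranspose(P)--> xp --scatter--> xtmp --copy--> xc
    reverse: yc --copy--> xtmp --scatter(REVERSE)--> xp --MatMult(P)--> y
*/
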
PetscErrorCode DMFieldScatter_ShellDA(DM dmf_shell,Vec x,ScatterMode mode,DM dmc_shell,Vec xc)
{
  DM dmf = NULL,dmc = NULL;

  DMShellGetContext(dmf_shell,&dmf);
  if (dmc_shell) {
    DMShellGetContext(dmc_shell,&dmc);
  }
  if (mode == SCATTER_FORWARD) {
    DMShellDAFieldScatter_Forward(dmf,x,dmc,xc);
  } else if (mode == SCATTER_REVERSE) {
    DMShellDAFieldScatter_Reverse(dmf,x,dmc,xc);
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell),PETSC_ERR_SUP,"Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  return 0;
}

PetscErrorCode DMStateScatter_ShellDA(DM dmf_shell,ScatterMode mode,DM dmc_shell)
{
  PetscMPIInt size_f = 0,size_c = 0;

  MPI_Comm_size(PetscObjectComm((PetscObject)dmf_shell),&size_f);
  if (dmc_shell) {
    MPI_Comm_size(PetscObjectComm((PetscObject)dmc_shell),&size_c);
  }
  if (mode == SCATTER_FORWARD) {
    PetscPrintf(PetscObjectComm((PetscObject)dmf_shell),"User supplied state scatter (fine [size %d]-> coarse [size %d])\n",(int)size_f,(int)size_c);
  } else if (mode == SCATTER_REVERSE) {
    /* nothing to do - no state needs to move back up the hierarchy in this example */
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell),PETSC_ERR_SUP,"Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  return 0;
}

PetscErrorCode DMShellCreate_ShellDA(DM da,DM *dms)
{
  if (da) {
    DMShellCreate(PetscObjectComm((PetscObject)da),dms);
    DMShellSetContext(*dms,da);
    DMShellSetCreateGlobalVector(*dms,DMCreateGlobalVector_ShellDA);
    DMShellSetCreateLocalVector(*dms,DMCreateLocalVector_ShellDA);
    DMShellSetCreateMatrix(*dms,DMCreateMatrix_ShellDA);
    DMShellSetCoarsen(*dms,DMCoarsen_ShellDA);
    DMShellSetCreateInterpolation(*dms,DMCreateInterpolation_ShellDA);
  } else {
    *dms = NULL;
  }
  return 0;
}

PetscErrorCode DMDestroyShellDMDA(DM *_dm)
{
  DM dm,da = NULL;

  if (!_dm) return 0;
  dm = *_dm;
  if (!dm) return 0;

  DMShellGetContext(dm,&da);
  if (da) {
    Vec        vec;
    VecScatter scatter = NULL;
    IS         is = NULL;
    Mat        P = NULL;

    PetscObjectQuery((PetscObject)da,"P",(PetscObject*)&P);
    MatDestroy(&P);

    vec = NULL;
    PetscObjectQuery((PetscObject)da,"xp",(PetscObject*)&vec);
    VecDestroy(&vec);

    PetscObjectQuery((PetscObject)da,"scatter",(PetscObject*)&scatter);
    VecScatterDestroy(&scatter);

    vec = NULL;
    PetscObjectQuery((PetscObject)da,"xtmp",(PetscObject*)&vec);
    VecDestroy(&vec);

    PetscObjectQuery((PetscObject)da,"isin",(PetscObject*)&is);
    ISDestroy(&is);

    DMDestroy(&da);
  }
  DMDestroy(&dm);
  *_dm = NULL;
  return 0;
}

PetscErrorCode HierarchyCreate_Basic(DM *dm_f,DM *dm_c,UserContext *ctx)
{
  DM          dm,dmc,dm_shell,dmc_shell;
  PetscMPIInt rank;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,17,17,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dm);
  DMSetFromOptions(dm);
  DMSetUp(dm);
  DMDASetUniformCoordinates(dm,0,1,0,1,0,0);
  DMDASetFieldName(dm,0,"Pressure");
  DMShellCreate_ShellDA(dm,&dm_shell);
  DMSetApplicationContext(dm_shell,ctx);

  dmc = NULL;
  dmc_shell = NULL;
  if (rank == 0) {
    DMDACreate2d(PETSC_COMM_SELF,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,17,17,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dmc);
    DMSetFromOptions(dmc);
    DMSetUp(dmc);
    DMDASetUniformCoordinates(dmc,0,1,0,1,0,0);
    DMDASetFieldName(dmc,0,"Pressure");
    DMShellCreate_ShellDA(dmc,&dmc_shell);
    DMSetApplicationContext(dmc_shell,ctx);
  }

  DMSetCoarseDM(dm_shell,dmc_shell);
  DMShellDASetUp_TelescopeDMScatter(dm_shell,dmc_shell);

  *dm_f = dm_shell;
  *dm_c = dmc_shell;
  return 0;
}
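
/*
  HierarchyCreate_Basic() builds the simplest configuration: the 17 x 17
  fine DMDA lives on PETSC_COMM_WORLD while an identically sized "coarse"
  DMDA lives on rank 0 only (PETSC_COMM_SELF), so PCTelescope gathers the
  entire problem onto a single rank through the coarse DM.
*/
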
PetscErrorCode HierarchyCreate(PetscInt *_nd,PetscInt *_nref,MPI_Comm **_cl,DM **_dl)
{
  PetscInt     d,k,ndecomps,ncoarsen,found,nx;
  PetscInt     levelrefs,*number;
  PetscSubcomm *pscommlist;
  MPI_Comm     *commlist;
  DM           *dalist,*dmlist;
  PetscBool    set;

  ndecomps = 1;
  PetscOptionsGetInt(NULL,NULL,"-ndecomps",&ndecomps,NULL);
  ncoarsen = ndecomps - 1;

  levelrefs = 2;
  PetscOptionsGetInt(NULL,NULL,"-level_nrefs",&levelrefs,NULL);
  PetscMalloc1(ncoarsen+1,&number);
  for (k=0; k<ncoarsen+1; k++) {
    number[k] = 2;
  }
  found = ncoarsen;
  set = PETSC_FALSE;
  PetscOptionsGetIntArray(NULL,NULL,"-level_comm_red_factor",number,&found,&set);
  if (set) {
    /* sanity check: one reduction factor must be given per communicator coarsening */
    if (found != ncoarsen) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Expected one entry of -level_comm_red_factor per communicator coarsening");
  }

  PetscMalloc1(ncoarsen+1,&pscommlist);
  for (k=0; k<ncoarsen+1; k++) {
    pscommlist[k] = NULL;
  }

  PetscMalloc1(ndecomps,&commlist);
  for (k=0; k<ndecomps; k++) {
    commlist[k] = MPI_COMM_NULL;
  }
  PetscMalloc1(ndecomps*levelrefs,&dalist);
  PetscMalloc1(ndecomps*levelrefs,&dmlist);
  for (k=0; k<ndecomps*levelrefs; k++) {
    dalist[k] = NULL;
    dmlist[k] = NULL;
  }

  CommHierarchyCreate(PETSC_COMM_WORLD,ncoarsen,number,pscommlist);
  for (k=0; k<ncoarsen; k++) {
    if (pscommlist[k]) {
      MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);
      if (pscommlist[k]->color == 0) {
        PetscCommDuplicate(comm_k,&commlist[k],NULL);
      }
    }
  }
  PetscCommDuplicate(PETSC_COMM_WORLD,&commlist[ndecomps-1],NULL);

  for (k=0; k<ncoarsen; k++) {
    if (pscommlist[k]) {
      PetscSubcommDestroy(&pscommlist[k]);
    }
  }

  nx = 17;
  PetscOptionsGetInt(NULL,NULL,"-m",&nx,NULL);
  for (d=0; d<ndecomps; d++) {
    DM   dmroot = NULL;
    char name[PETSC_MAX_PATH_LEN];

    if (commlist[d] != MPI_COMM_NULL) {
      DMDACreate2d(commlist[d],DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,nx,nx,PETSC_DECIDE,PETSC_DECIDE,1,1,0,0,&dmroot);
      DMSetUp(dmroot);
      DMDASetUniformCoordinates(dmroot,0,1,0,1,0,0);
      DMDASetFieldName(dmroot,0,"Pressure");
      PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"root-decomp-%D",d);
      PetscObjectSetName((PetscObject)dmroot,name);
      /*DMView(dmroot,PETSC_VIEWER_STDOUT_(commlist[d]));*/
    }

    dalist[d*levelrefs + 0] = dmroot;
    for (k=1; k<levelrefs; k++) {
      DM dmref = NULL;

      if (commlist[d] != MPI_COMM_NULL) {
        DMRefine(dalist[d*levelrefs + (k-1)],MPI_COMM_NULL,&dmref);
        PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"ref%D-decomp-%D",k,d);
        PetscObjectSetName((PetscObject)dmref,name);
        DMDAGetInfo(dmref,NULL,&nx,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
        /*DMView(dmref,PETSC_VIEWER_STDOUT_(commlist[d]));*/
      }
      dalist[d*levelrefs + k] = dmref;
    }
    /* propagate the refined grid size so the next decomposition starts from it */
    MPI_Allreduce(MPI_IN_PLACE,&nx,1,MPIU_INT,MPI_MAX,PETSC_COMM_WORLD);
  }

  /* create the hierarchy of DMShells */
  for (d=0; d<ndecomps; d++) {
    char name[PETSC_MAX_PATH_LEN];

    UserContext *ctx = NULL;
    if (commlist[d] != MPI_COMM_NULL) {
      UserContextCreate(commlist[d],&ctx);
      for (k=0; k<levelrefs; k++) {
        DMShellCreate_ShellDA(dalist[d*levelrefs + k],&dmlist[d*levelrefs + k]);
        DMSetApplicationContext(dmlist[d*levelrefs + k],ctx);
        PetscSNPrintf(name,PETSC_MAX_PATH_LEN-1,"level%D-decomp-%D",k,d);
        PetscObjectSetName((PetscObject)dmlist[d*levelrefs + k],name);
      }
    }
  }

  /* set all the coarse DMs */
  for (k=1; k<ndecomps*levelrefs; k++) { /* skip the first DM as it doesn't have a coarse representation */
    DM dmfine = NULL,dmcoarse = NULL;

    dmfine = dmlist[k];
    dmcoarse = dmlist[k-1];
    if (dmfine) {
      DMSetCoarseDM(dmfine,dmcoarse);
    }
  }

  /* do special setup on the fine DM coupling different decompositions */
  for (d=1; d<ndecomps; d++) { /* skip the first decomposition as it doesn't have a coarse representation */
    DM dmfine = NULL,dmcoarse = NULL;

    dmfine = dmlist[d*levelrefs + 0];
    dmcoarse = dmlist[(d-1)*levelrefs + (levelrefs-1)];
    if (dmfine) {
      DMShellDASetUp_TelescopeDMScatter(dmfine,dmcoarse);
    }
  }

  PetscFree(number);
  for (k=0; k<ncoarsen; k++) {
    PetscSubcommDestroy(&pscommlist[k]);
  }
  PetscFree(pscommlist);

  if (_nd) {
    *_nd = ndecomps;
  }
  if (_nref) {
    *_nref = levelrefs;
  }
  if (_cl) {
    *_cl = commlist;
  } else {
    for (k=0; k<ndecomps; k++) {
      if (commlist[k] != MPI_COMM_NULL) {
        PetscCommDestroy(&commlist[k]);
      }
    }
    PetscFree(commlist);
  }
  if (_dl) {
    *_dl = dmlist;
    PetscFree(dalist);
  } else {
    for (k=0; k<ndecomps*levelrefs; k++) {
      DMDestroy(&dmlist[k]);
      DMDestroy(&dalist[k]);
    }
    PetscFree(dmlist);
    PetscFree(dalist);
  }
  return 0;
}
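
/*
  Illustration: -ndecomps 3 -level_nrefs 3 -m 5 (as in the mg_3lv_2mg test
  below) builds three decompositions of three refinement levels each, nine
  DMs in total. Each decomposition lives on a communicator reduced by the
  corresponding -level_comm_red_factor entry (default 2), and the coarsest
  DM of one decomposition is coupled to the finest DM of the previous one
  via the Telescope scatters.
*/
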
PetscErrorCode test_hierarchy(void)
{
  PetscInt d,k,nd,nref;
  MPI_Comm *comms;
  DM       *dms;

  HierarchyCreate(&nd,&nref,&comms,&dms);

  /* destroy user context */
  for (d=0; d<nd; d++) {
    DM first = dms[d*nref+0];

    if (first) {
      UserContext *ctx = NULL;

      DMGetApplicationContext(first,&ctx);
      if (ctx) PetscFree(ctx);
      DMSetApplicationContext(first,NULL);
    }
    for (k=1; k<nref; k++) {
      DM dm = dms[d*nref+k];
      if (dm) {
        DMSetApplicationContext(dm,NULL);
      }
    }
  }

  /* destroy DMs */
  for (k=0; k<nd*nref; k++) {
    if (dms[k]) {
      DMDestroyShellDMDA(&dms[k]);
    }
  }
  PetscFree(dms);

  /* destroy communicators */
  for (k=0; k<nd; k++) {
    if (comms[k] != MPI_COMM_NULL) {
      PetscCommDestroy(&comms[k]);
    }
  }
  PetscFree(comms);
  return 0;
}

PetscErrorCode test_basic(void)
{
  DM          dmF,dmdaF = NULL,dmC = NULL;
  Mat         A;
  Vec         x,b;
  KSP         ksp;
  PC          pc;
  UserContext *user = NULL;

  UserContextCreate(PETSC_COMM_WORLD,&user);
  HierarchyCreate_Basic(&dmF,&dmC,user);
  DMShellGetContext(dmF,&dmdaF);

  DMCreateMatrix(dmF,&A);
  DMCreateGlobalVector(dmF,&x);
  DMCreateGlobalVector(dmF,&b);
  ComputeRHS_DMDA(dmdaF,b,user);

  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetComputeOperators(ksp,ComputeMatrix_ShellDA,user);
  /*KSPSetOperators(ksp,A,A);*/
  KSPSetDM(ksp,dmF);
  KSPSetDMActive(ksp,PETSC_TRUE);
  KSPSetFromOptions(ksp);
  KSPGetPC(ksp,&pc);
  PCTelescopeSetUseCoarseDM(pc,PETSC_TRUE);

  KSPSolve(ksp,b,x);

  if (dmC) {
    DMDestroyShellDMDA(&dmC);
  }
  DMDestroyShellDMDA(&dmF);
  KSPDestroy(&ksp);
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&x);
  PetscFree(user);
  return 0;
}
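
/*
  An abridged version of the basic_dirichlet test registered at the bottom
  of this file:

    mpiexec -n 4 ./ex73 -tid 0 -ksp_type gcr -pc_type telescope \
            -telescope_ksp_type fgmres -telescope_pc_type lu -ksp_monitor_short
*/
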
PetscErrorCode test_mg(void)
{
  DM          dmF,dmdaF = NULL,*dms = NULL;
  Mat         A;
  Vec         x,b;
  KSP         ksp;
  PetscInt    k,d,nd,nref;
  MPI_Comm    *comms = NULL;
  UserContext *user = NULL;

  HierarchyCreate(&nd,&nref,&comms,&dms);
  dmF = dms[nd*nref-1];

  DMShellGetContext(dmF,&dmdaF);
  DMGetApplicationContext(dmF,&user);

  DMCreateMatrix(dmF,&A);
  DMCreateGlobalVector(dmF,&x);
  DMCreateGlobalVector(dmF,&b);
  ComputeRHS_DMDA(dmdaF,b,user);

  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetComputeOperators(ksp,ComputeMatrix_ShellDA,user);
  /*KSPSetOperators(ksp,A,A);*/
  KSPSetDM(ksp,dmF);
  KSPSetDMActive(ksp,PETSC_TRUE);
  KSPSetFromOptions(ksp);

  KSPSolve(ksp,b,x);

  for (d=0; d<nd; d++) {
    DM first = dms[d*nref+0];

    if (first) {
      UserContext *ctx = NULL;

      DMGetApplicationContext(first,&ctx);
      if (ctx) PetscFree(ctx);
      DMSetApplicationContext(first,NULL);
    }
    for (k=1; k<nref; k++) {
      DM dm = dms[d*nref+k];
      if (dm) {
        DMSetApplicationContext(dm,NULL);
      }
    }
  }

  for (k=0; k<nd*nref; k++) {
    if (dms[k]) {
      DMDestroyShellDMDA(&dms[k]);
    }
  }
  PetscFree(dms);

  KSPDestroy(&ksp);
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&x);

  for (k=0; k<nd; k++) {
    if (comms[k] != MPI_COMM_NULL) {
      PetscCommDestroy(&comms[k]);
    }
  }
  PetscFree(comms);
  return 0;
}

int main(int argc,char **argv)
{
  PetscInt test_id = 0;

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscOptionsGetInt(NULL,NULL,"-tid",&test_id,NULL);
  switch (test_id) {
  case 0:
    test_basic();
    break;
  case 1:
    test_hierarchy();
    break;
  case 2:
    test_mg();
    break;
  default:
    SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"-tid must be 0,1,2");
  }
  PetscFinalize();
  return 0;
}

PetscErrorCode ComputeRHS_DMDA(DM da,Vec b,void *ctx)
{
  UserContext *user = (UserContext*)ctx;
  PetscInt    i,j,mx,my,xm,ym,xs,ys;
  PetscScalar Hx,Hy;
  PetscScalar **array;
  PetscBool   isda = PETSC_FALSE;

  PetscObjectTypeCompare((PetscObject)da,DMDA,&isda);
  if (!isda) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"DM provided must be a DMDA");

  DMDAGetInfo(da,NULL,&mx,&my,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  Hx = 1.0 / (PetscReal)(mx-1);
  Hy = 1.0 / (PetscReal)(my-1);
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAVecGetArray(da,b,&array);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      /* f(x,y) = e^{-x^2/nu} e^{-y^2/nu}, scaled by the cell area Hx*Hy */
      array[j][i] = PetscExpScalar(-((PetscReal)i*Hx)*((PetscReal)i*Hx)/user->nu)*PetscExpScalar(-((PetscReal)j*Hy)*((PetscReal)j*Hy)/user->nu)*Hx*Hy;
    }
  }
  DMDAVecRestoreArray(da, b, &array);
  VecAssemblyBegin(b);
  VecAssemblyEnd(b);

  /* force the right-hand side to be consistent for the singular matrix */
  /* note this is really a hack, normally the model would provide you with a consistent right-hand side */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,0,&nullspace);
    MatNullSpaceRemove(nullspace,b);
    MatNullSpaceDestroy(&nullspace);
  }
  return 0;
}

PetscErrorCode ComputeRho(PetscInt i, PetscInt j, PetscInt mx, PetscInt my, PetscReal centerRho, PetscReal *rho)
{
  /* rho is centerRho inside the central third of the domain and 1 elsewhere */
  if ((i > mx/3.0) && (i < 2.0*mx/3.0) && (j > my/3.0) && (j < 2.0*my/3.0)) {
    *rho = centerRho;
  } else {
    *rho = 1.0;
  }
  return 0;
}

PetscErrorCode ComputeMatrix_DMDA(DM da,Mat J,Mat jac,void *ctx)
{
  UserContext *user = (UserContext*)ctx;
  PetscReal   centerRho;
  PetscInt    i,j,mx,my,xm,ym,xs,ys;
  PetscScalar v[5];
  PetscReal   Hx,Hy,HydHx,HxdHy,rho;
  MatStencil  row, col[5];
  PetscBool   isda = PETSC_FALSE;

  PetscObjectTypeCompare((PetscObject)da,DMDA,&isda);
  if (!isda) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"DM provided must be a DMDA");

  MatZeroEntries(jac);
  centerRho = user->rho;
  DMDAGetInfo(da,NULL,&mx,&my,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  Hx = 1.0 / (PetscReal)(mx-1);
  Hy = 1.0 / (PetscReal)(my-1);
  HxdHy = Hx/Hy;
  HydHx = Hy/Hx;
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      row.i = i; row.j = j;
      ComputeRho(i, j, mx, my, centerRho, &rho);
      if (i==0 || j==0 || i==mx-1 || j==my-1) {
        if (user->bcType == DIRICHLET) {
          v[0] = 2.0*rho*(HxdHy + HydHx);
          MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);
        } else if (user->bcType == NEUMANN) {
          PetscInt numx = 0, numy = 0, num = 0;
          if (j!=0) {
            v[num] = -rho*HxdHy; col[num].i = i; col[num].j = j-1;
            numy++; num++;
          }
          if (i!=0) {
            v[num] = -rho*HydHx; col[num].i = i-1; col[num].j = j;
            numx++; num++;
          }
          if (i!=mx-1) {
            v[num] = -rho*HydHx; col[num].i = i+1; col[num].j = j;
            numx++; num++;
          }
          if (j!=my-1) {
            v[num] = -rho*HxdHy; col[num].i = i; col[num].j = j+1;
            numy++; num++;
          }
          v[num] = numx*rho*HydHx + numy*rho*HxdHy; col[num].i = i; col[num].j = j;
          num++;
          MatSetValuesStencil(jac,1,&row,num,col,v,INSERT_VALUES);
        }
      } else {
        /* interior: standard 5-point stencil */
        v[0] = -rho*HxdHy;              col[0].i = i;   col[0].j = j-1;
        v[1] = -rho*HydHx;              col[1].i = i-1; col[1].j = j;
        v[2] = 2.0*rho*(HxdHy + HydHx); col[2].i = i;   col[2].j = j;
        v[3] = -rho*HydHx;              col[3].i = i+1; col[3].j = j;
        v[4] = -rho*HxdHy;              col[4].i = i;   col[4].j = j+1;
        MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  return 0;
}

PetscErrorCode ComputeMatrix_ShellDA(KSP ksp,Mat J,Mat jac,void *ctx)
{
  DM dm,da;

  KSPGetDM(ksp,&dm);
  DMShellGetContext(dm,&da);
  ComputeMatrix_DMDA(da,J,jac,ctx);
  return 0;
}

/*TEST

  test:
    suffix: basic_dirichlet
    nsize: 4
    args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type lu -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr

  test:
    suffix: basic_neumann
    nsize: 4
    requires: !single
    args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type jacobi -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr -bc_type neumann

  test:
    suffix: mg_2lv_2mg
    nsize: 6
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 2 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -level_comm_red_factor 6 -mg_coarse_telescope_mg_coarse_pc_type lu

  test:
    suffix: mg_3lv_2mg
    nsize: 4
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu -m 5

  test:
    suffix: mg_3lv_2mg_customcommsize
    nsize: 12
    args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -m 5 -level_comm_red_factor 2,6 -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu

TEST*/
1149: TEST*/