Actual source code: gr2.c


  2: /*
  3:    Plots vectors obtained with DMDACreate2d()
  4: */

  6: #include <petsc/private/dmdaimpl.h>
  7: #include <petsc/private/glvisvecimpl.h>
  8: #include <petsc/private/viewerhdf5impl.h>
  9: #include <petscdraw.h>

 11: /*
 12:         The data that is passed into the graphics callback
 13: */
 14: typedef struct {
 15:   PetscMPIInt       rank;
 16:   PetscInt          m,n,dof,k;
 17:   PetscReal         xmin,xmax,ymin,ymax,min,max;
 18:   const PetscScalar *xy,*v;
 19:   PetscBool         showaxis,showgrid;
 20:   const char        *name0,*name1;
 21: } ZoomCtx;

 23: /*
 24:        This does the drawing for one particular field
 25:     in one particular set of coordinates. It is a callback
 26:     called from PetscDrawZoom()
 27: */
 28: PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
 29: {
 30:   ZoomCtx           *zctx = (ZoomCtx*)ctx;
 31:   PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
 32:   PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4; /* y_1 rather than y1: y1() is a Bessel function in math.h on some systems */
 33:   const PetscScalar *xy,*v;

 36:   m    = zctx->m;
 37:   n    = zctx->n;
 38:   dof  = zctx->dof;
 39:   k    = zctx->k;
 40:   xy   = zctx->xy;
 41:   v    = zctx->v;
 42:   min  = zctx->min;
 43:   max  = zctx->max;

 45:   /* Draw the contour plot patch */
 46:   PetscDrawCollectiveBegin(draw);
 47:   for (j=0; j<n-1; j++) {
 48:     for (i=0; i<m-1; i++) {
 49:       id   = i+j*m;
 50:       x1   = PetscRealPart(xy[2*id]);
 51:       y_1  = PetscRealPart(xy[2*id+1]);
 52:       c1   = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

 54:       id   = i+j*m+1;
 55:       x2   = PetscRealPart(xy[2*id]);
 56:       y2   = PetscRealPart(xy[2*id+1]);
 57:       c2   = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

 59:       id   = i+j*m+1+m;
 60:       x3   = PetscRealPart(xy[2*id]);
 61:       y3   = PetscRealPart(xy[2*id+1]);
 62:       c3   = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

 64:       id   = i+j*m+m;
 65:       x4   = PetscRealPart(xy[2*id]);
 66:       y4   = PetscRealPart(xy[2*id+1]);
 67:       c4   = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

 69:       PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);
 70:       PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);
 71:       if (zctx->showgrid) {
 72:         PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
 73:         PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
 74:         PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
 75:         PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
 76:       }
 77:     }
 78:   }
 79:   if (zctx->showaxis && !zctx->rank) {
 80:     if (zctx->name0 || zctx->name1) {
 81:       PetscReal xl,yl,xr,yr,x,y;
 82:       PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);
 83:       x  = xl + .30*(xr - xl);
 84:       xl = xl + .01*(xr - xl);
 85:       y  = yr - .30*(yr - yl);
 86:       yl = yl + .01*(yr - yl);
 87:       if (zctx->name0) PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);
 88:       if (zctx->name1) PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);
 89:     }
 90:     /*
 91:        Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
 92:        but that may require some refactoring.
 93:     */
 94:     {
 95:       double xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
 96:       double xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
 97:       char   value[16]; size_t len; PetscReal w;
 98:       PetscSNPrintf(value,16,"%0.2e",xmin);
 99:       PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);
100:       PetscSNPrintf(value,16,"%0.2e",xmax);
101:       PetscStrlen(value,&len);
102:       PetscDrawStringGetSize(draw,&w,NULL);
103:       PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);
104:       PetscSNPrintf(value,16,"%0.2e",ymin);
105:       PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value);
106:       PetscSNPrintf(value,16,"%0.2e",ymax);
107:       PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value);
108:     }
109:   }
110:   PetscDrawCollectiveEnd(draw);
111:   return 0;
112: }
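
/*
   Editorial sketch (not part of gr2.c): how the callback above is driven.
   PetscDrawZoom() re-invokes the callback with the user-supplied context each
   time the user zooms or pans, so the callback must be able to redraw from
   scratch; the real call appears in VecView_MPI_Draw_DA2d() below.

     ZoomCtx zctx;                                    // populated by the caller
     PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
*/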

114: PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
115: {
116:   DM                 da,dac,dag;
117:   PetscInt           N,s,M,w,ncoors = 4;
118:   const PetscInt     *lx,*ly;
119:   PetscReal          coors[4];
120:   PetscDraw          draw,popup;
121:   PetscBool          isnull,useports = PETSC_FALSE;
122:   MPI_Comm           comm;
123:   Vec                xlocal,xcoor,xcoorl;
124:   DMBoundaryType     bx,by;
125:   DMDAStencilType    st;
126:   ZoomCtx            zctx;
127:   PetscDrawViewPorts *ports = NULL;
128:   PetscViewerFormat  format;
129:   PetscInt           *displayfields;
130:   PetscInt           ndisplayfields,i,nbounds;
131:   const PetscReal    *bounds;

133:   zctx.showgrid = PETSC_FALSE;
134:   zctx.showaxis = PETSC_TRUE;

136:   PetscViewerDrawGetDraw(viewer,0,&draw);
137:   PetscDrawIsNull(draw,&isnull);
138:   if (isnull) return 0;

140:   PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);

142:   VecGetDM(xin,&da);

145:   PetscObjectGetComm((PetscObject)xin,&comm);
146:   MPI_Comm_rank(comm,&zctx.rank);

148:   DMDAGetInfo(da,NULL,&M,&N,NULL,&zctx.m,&zctx.n,NULL,&w,&s,&bx,&by,NULL,&st);
149:   DMDAGetOwnershipRanges(da,&lx,&ly,NULL);

151:   /*
152:      Obtain a sequential vector that will contain the local values plus ONE layer of
153:      ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
154:      update the local values plus ONE layer of ghost values.
155:   */
156:   PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);
157:   if (!xlocal) {
158:     if (bx !=  DM_BOUNDARY_NONE || by !=  DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
159:       /*
160:          if the original da does not have stencil width one, is periodic, or does not use a box
161:          stencil, then create a special DMDA to handle one level of ghost points for graphics
162:       */
163:       DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);
164:       DMSetUp(dac);
165:       PetscInfo(da,"Creating auxiliary DMDA for managing graphics ghost points\n");
166:     } else {
167:       /* otherwise we can use the da we already have */
168:       dac = da;
169:     }
170:     /* create local vector for holding ghosted values used in graphics */
171:     DMCreateLocalVector(dac,&xlocal);
172:     if (dac != da) {
173:       /* don't keep any public reference to this DMDA; it is only available through xlocal */
174:       PetscObjectDereference((PetscObject)dac);
175:     } else {
176:       /* remove the association between xlocal and da, because below we compose in the opposite
177:          direction; if we left this connection we would get a reference loop and the objects
178:          could never be destroyed */
179:       PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");
180:     }
181:     PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);
182:     PetscObjectDereference((PetscObject)xlocal);
183:   } else {
184:     if (bx !=  DM_BOUNDARY_NONE || by !=  DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
185:       VecGetDM(xlocal, &dac);
186:     } else {
187:       dac = da;
188:     }
189:   }

191:   /*
192:       Get local (ghosted) values of vector
193:   */
194:   DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);
195:   DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);
196:   VecGetArrayRead(xlocal,&zctx.v);

198:   /*
199:       Get coordinates of nodes
200:   */
201:   DMGetCoordinates(da,&xcoor);
202:   if (!xcoor) {
203:     DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);
204:     DMGetCoordinates(da,&xcoor);
205:   }

207:   /*
208:       Determine the min and max coordinates in plot
209:   */
210:   VecStrideMin(xcoor,0,NULL,&zctx.xmin);
211:   VecStrideMax(xcoor,0,NULL,&zctx.xmax);
212:   VecStrideMin(xcoor,1,NULL,&zctx.ymin);
213:   VecStrideMax(xcoor,1,NULL,&zctx.ymax);
214:   PetscOptionsGetBool(NULL,NULL,"-draw_contour_axis",&zctx.showaxis,NULL);
215:   if (zctx.showaxis) {
216:     coors[0] = zctx.xmin - .05*(zctx.xmax - zctx.xmin); coors[1] = zctx.ymin - .05*(zctx.ymax - zctx.ymin);
217:     coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
218:   } else {
219:     coors[0] = zctx.xmin; coors[1] = zctx.ymin; coors[2] = zctx.xmax; coors[3] = zctx.ymax;
220:   }
221:   PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL);
222:   PetscInfo(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);

224:   /*
225:       Get local ghosted version of coordinates
226:   */
227:   PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);
228:   if (!xcoorl) {
229:     /* create a DMDA to manage the local (ghosted) version of the coordinates */
230:     DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);
231:     DMSetUp(dag);
232:     PetscInfo(dag,"Creating auxiliary DMDA for managing graphics coordinates ghost points\n");
233:     DMCreateLocalVector(dag,&xcoorl);
234:     PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);
235:     PetscObjectDereference((PetscObject)dag);
236:     PetscObjectDereference((PetscObject)xcoorl);
237:   } else {
238:     VecGetDM(xcoorl,&dag);
239:   }
240:   DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);
241:   DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);
242:   VecGetArrayRead(xcoorl,&zctx.xy);
243:   DMDAGetCoordinateName(da,0,&zctx.name0);
244:   DMDAGetCoordinateName(da,1,&zctx.name1);

246:   /*
247:       Get information about the size of the region each process must draw
248:   */
249:   DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL);
250:   DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL);
251:   PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL);

253:   DMDASelectFields(da,&ndisplayfields,&displayfields);
254:   PetscViewerGetFormat(viewer,&format);
255:   PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL);
256:   if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
257:   if (useports) {
258:     PetscViewerDrawGetDraw(viewer,0,&draw);
259:     PetscDrawCheckResizedWindow(draw);
260:     PetscDrawClear(draw);
261:     PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);
262:   }

264:   /*
265:       Loop over each field, drawing each in a different window (or viewport when ports are used)
266:   */
267:   for (i=0; i<ndisplayfields; i++) {
268:     zctx.k = displayfields[i];

270:     /* determine the min and max value in plot */
271:     VecStrideMin(xin,zctx.k,NULL,&zctx.min);
272:     VecStrideMax(xin,zctx.k,NULL,&zctx.max);
273:     if (zctx.k < nbounds) {
274:       zctx.min = bounds[2*zctx.k];
275:       zctx.max = bounds[2*zctx.k+1];
276:     }
277:     if (zctx.min == zctx.max) {
278:       zctx.min -= 1.e-12;
279:       zctx.max += 1.e-12;
280:     }
281:     PetscInfo(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);

283:     if (useports) {
284:       PetscDrawViewPortsSet(ports,i);
285:     } else {
286:       const char *title;
287:       PetscViewerDrawGetDraw(viewer,i,&draw);
288:       DMDAGetFieldName(da,zctx.k,&title);
289:       if (title) PetscDrawSetTitle(draw,title);
290:     }

292:     PetscDrawGetPopup(draw,&popup);
293:     PetscDrawScalePopup(popup,zctx.min,zctx.max);
294:     PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);
295:     PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
296:     if (!useports) PetscDrawSave(draw);
297:   }
298:   if (useports) {
299:     PetscViewerDrawGetDraw(viewer,0,&draw);
300:     PetscDrawSave(draw);
301:   }

303:   PetscDrawViewPortsDestroy(ports);
304:   PetscFree(displayfields);
305:   VecRestoreArrayRead(xcoorl,&zctx.xy);
306:   VecRestoreArrayRead(xlocal,&zctx.v);
307:   return 0;
308: }
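
/*
   Usage sketch (the options are the ones parsed above): this routine is reached
   by calling VecView() on a vector obtained from a 2d DMDA with a draw viewer,

     VecView(x,PETSC_VIEWER_DRAW_WORLD);

   and can be steered from the command line with

     -draw_contour_grid                      draw the mesh lines over the contour plot
     -draw_contour_axis <bool>               toggle the coordinate labels (default true)
     -draw_ports                             draw all fields as viewports of one window
     -draw_coordinates xmin,ymin,xmax,ymax   override the plot bounding box
*/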

310: #if defined(PETSC_HAVE_HDF5)
311: static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
312: {
313:   PetscMPIInt    comm_size;
314:   hsize_t        chunk_size, target_size, dim;
315:   hsize_t        vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
316:   hsize_t        avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
317:   hsize_t        max_chunks = 64*KiB;                                              /* HDF5 internal limitation */
318:   hsize_t        max_chunk_size = 4*GiB;                                           /* HDF5 internal limitation */
319:   PetscInt       zslices=da->p, yslices=da->n, xslices=da->m;

321:   MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);
322:   avg_local_vec_size = (hsize_t) PetscCeilInt(vec_size,comm_size);      /* we will attempt to use this as the chunk size */

324:   target_size = (hsize_t) PetscMin((PetscInt64)vec_size,PetscMin((PetscInt64)max_chunk_size,PetscMax((PetscInt64)avg_local_vec_size,PetscMax(PetscCeilInt64(vec_size,max_chunks),(PetscInt64)min_size))));
325:   /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
326:   chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);

328:   /*
329:    if size/rank > max_chunk_size, we need radical measures: even going down to
330:    avg_local_vec_size is not enough, so we simply use a chunk size of 4 GiB no matter
331:    what, composed in the most efficient way possible.
332:    N.B. this minimises the number of chunks, which may or may not be the optimal
333:    solution. On a BlueGene, for example, the optimal solution is probably to make the
334:    number of chunks equal to the number of IO nodes involved, but this author has no
335:    access to a BlueGene to figure that out reliably. And even then it may or may not be enough.
336:    */
337:   if (avg_local_vec_size > max_chunk_size) {
338:     /* check if we can just split local z-axis: is that enough? */
339:     zslices = PetscCeilInt(vec_size,da->p*max_chunk_size)*zslices;
340:     if (zslices > da->P) {
341:       /* the lattice is too large in the xy-directions; splitting z only is not enough */
342:       zslices = da->P;
343:       yslices = PetscCeilInt(vec_size,zslices*da->n*max_chunk_size)*yslices;
344:       if (yslices > da->N) {
345:         /* the lattice is too large in the x-direction; splitting along z and y is not enough */
346:         yslices = da->N;
347:         xslices = PetscCeilInt(vec_size,zslices*yslices*da->m*max_chunk_size)*xslices;
348:       }
349:     }
350:     dim = 0;
351:     if (timestep >= 0) {
352:       ++dim;
353:     }
354:     /* prefer to split z-axis, even down to planar slices */
355:     if (dimension == 3) {
356:       chunkDims[dim++] = (hsize_t) da->P/zslices;
357:       chunkDims[dim++] = (hsize_t) da->N/yslices;
358:       chunkDims[dim++] = (hsize_t) da->M/xslices;
359:     } else {
360:       /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
361:       chunkDims[dim++] = (hsize_t) da->N/yslices;
362:       chunkDims[dim++] = (hsize_t) da->M/xslices;
363:     }
364:     chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);
365:   } else {
366:     if (target_size < chunk_size) {
367:       /* only change the defaults if target_size < chunk_size */
368:       dim = 0;
369:       if (timestep >= 0) {
370:         ++dim;
371:       }
372:       /* prefer to split z-axis, even down to planar slices */
373:       if (dimension == 3) {
374:         /* try splitting the z-axis into per-rank pieces, i.e. divide the chunk size by the number of ranks in the z-direction */
375:         if (target_size >= chunk_size/da->p) {
376:           /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
377:           chunkDims[dim] = (hsize_t) PetscCeilInt(da->P,da->p);
378:         } else {
379:           /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
380:            radical and let everyone write all they've got */
381:           chunkDims[dim++] = (hsize_t) PetscCeilInt(da->P,da->p);
382:           chunkDims[dim++] = (hsize_t) PetscCeilInt(da->N,da->n);
383:           chunkDims[dim++] = (hsize_t) PetscCeilInt(da->M,da->m);
384:         }
385:       } else {
386:         /* the 2D case: there is no z-axis, so start by splitting the y-axis */
387:         if (target_size >= chunk_size/da->n) {
388:           /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
389:           chunkDims[dim] = (hsize_t) PetscCeilInt(da->N,da->n);
390:         } else {
391:           /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
392:            radical and let everyone write all they've got */
393:           chunkDims[dim++] = (hsize_t) PetscCeilInt(da->N,da->n);
394:           chunkDims[dim++] = (hsize_t) PetscCeilInt(da->M,da->m);
395:         }

397:       }
398:       chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);
399:     } else {
400:       /* precomputed chunks are fine, we don't need to do anything */
401:     }
402:   }
403:   return 0;
404: }
405: #endif
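
/*
   A worked example of the sizing logic above (numbers are illustrative): a
   1024^3 DMDA with dof = 1 and 8-byte scalars gives vec_size = 8 GiB. On 16
   ranks, avg_local_vec_size = 512 MiB, which lies below max_chunk_size (4 GiB)
   and above both min_size (1 MiB) and vec_size/max_chunks (128 KiB), so
   target_size = 512 MiB. The caller's default chunk (one whole timestep,
   8 GiB) exceeds the target, so the z-axis is reduced first: with all 16 ranks
   stacked in z (da->p = 16), chunkDims gets P/p = 64 planes per chunk, i.e.
   512 MiB chunks. Only if even per-rank z-slabs were too large would all three
   axes drop to the per-rank local extents.
*/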

407: #if defined(PETSC_HAVE_HDF5)
408: PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
409: {
410:   PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5*) viewer->data;
411:   DM                dm;
412:   DM_DA             *da;
413:   hid_t             filespace;  /* file dataspace identifier */
414:   hid_t             chunkspace; /* chunk dataset property identifier */
415:   hid_t             dset_id;    /* dataset identifier */
416:   hid_t             memspace;   /* memory dataspace identifier */
417:   hid_t             file_id;
418:   hid_t             group;
419:   hid_t             memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
420:   hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
421:   hsize_t           dim;
422:   hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on  */
423:   PetscBool         timestepping=PETSC_FALSE, dim2, spoutput;
424:   PetscInt          timestep=PETSC_MIN_INT, dimension;
425:   const PetscScalar *x;
426:   const char        *vecname;

428:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
429:   PetscViewerHDF5IsTimestepping(viewer, &timestepping);
430:   if (timestepping) {
431:     PetscViewerHDF5GetTimestep(viewer, &timestep);
432:   }
433:   PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
434:   PetscViewerHDF5GetSPOutput(viewer,&spoutput);

436:   VecGetDM(xin,&dm);
438:   da = (DM_DA*)dm->data;
439:   DMGetDimension(dm, &dimension);

441:   /* Create the dataspace for the dataset.
442:    *
443:    * dims - holds the current dimensions of the dataset
444:    *
445:    * maxDims - holds the maximum dimensions of the dataset (unlimited
446:    * for the number of time steps with the current dimensions for the
447:    * other dimensions; so only additional time steps can be added).
448:    *
449:    * chunkDims - holds the size of a single time step (required to
450:    * permit extending dataset).
451:    */
452:   dim = 0;
453:   if (timestep >= 0) {
454:     dims[dim]      = timestep+1;
455:     maxDims[dim]   = H5S_UNLIMITED;
456:     chunkDims[dim] = 1;
457:     ++dim;
458:   }
459:   if (dimension == 3) {
460:     PetscHDF5IntCast(da->P,dims+dim);
461:     maxDims[dim]   = dims[dim];
462:     chunkDims[dim] = dims[dim];
463:     ++dim;
464:   }
465:   if (dimension > 1) {
466:     PetscHDF5IntCast(da->N,dims+dim);
467:     maxDims[dim]   = dims[dim];
468:     chunkDims[dim] = dims[dim];
469:     ++dim;
470:   }
471:   PetscHDF5IntCast(da->M,dims+dim);
472:   maxDims[dim]   = dims[dim];
473:   chunkDims[dim] = dims[dim];
474:   ++dim;
475:   if (da->w > 1 || dim2) {
476:     PetscHDF5IntCast(da->w,dims+dim);
477:     maxDims[dim]   = dims[dim];
478:     chunkDims[dim] = dims[dim];
479:     ++dim;
480:   }
481: #if defined(PETSC_USE_COMPLEX)
482:   dims[dim]      = 2;
483:   maxDims[dim]   = dims[dim];
484:   chunkDims[dim] = dims[dim];
485:   ++dim;
486: #endif

488:   VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims);

490:   PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

492: #if defined(PETSC_USE_REAL_SINGLE)
493:   memscalartype = H5T_NATIVE_FLOAT;
494:   filescalartype = H5T_NATIVE_FLOAT;
495: #elif defined(PETSC_USE_REAL___FLOAT128)
496: #error "HDF5 output with 128 bit floats not supported."
497: #elif defined(PETSC_USE_REAL___FP16)
498: #error "HDF5 output with 16 bit floats not supported."
499: #else
500:   memscalartype = H5T_NATIVE_DOUBLE;
501:   if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
502:   else filescalartype = H5T_NATIVE_DOUBLE;
503: #endif

505:   /* Create the dataset with default properties and close filespace */
506:   PetscObjectGetName((PetscObject)xin,&vecname);
507:   if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
508:     /* Create chunk */
509:     PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
510:     PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

512:     PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
513:   } else {
514:     PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
515:     PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
516:   }
517:   PetscStackCallHDF5(H5Sclose,(filespace));

519:   /* Each process defines a dataset and writes it to the hyperslab in the file */
520:   dim = 0;
521:   if (timestep >= 0) {
522:     offset[dim] = timestep;
523:     ++dim;
524:   }
525:   if (dimension == 3) PetscHDF5IntCast(da->zs,offset + dim++);
526:   if (dimension > 1)  PetscHDF5IntCast(da->ys,offset + dim++);
527:   PetscHDF5IntCast(da->xs/da->w,offset + dim++);
528:   if (da->w > 1 || dim2) offset[dim++] = 0;
529: #if defined(PETSC_USE_COMPLEX)
530:   offset[dim++] = 0;
531: #endif
532:   dim = 0;
533:   if (timestep >= 0) {
534:     count[dim] = 1;
535:     ++dim;
536:   }
537:   if (dimension == 3) PetscHDF5IntCast(da->ze - da->zs,count + dim++);
538:   if (dimension > 1)  PetscHDF5IntCast(da->ye - da->ys,count + dim++);
539:   PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);
540:   if (da->w > 1 || dim2) PetscHDF5IntCast(da->w,count + dim++);
541: #if defined(PETSC_USE_COMPLEX)
542:   count[dim++] = 2;
543: #endif
544:   PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
545:   PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
546:   PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

548:   VecGetArrayRead(xin, &x);
549:   PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
550:   PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
551:   VecRestoreArrayRead(xin, &x);

553:   #if defined(PETSC_USE_COMPLEX)
554:   {
555:     PetscBool tru = PETSC_TRUE;
556:     PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru);
557:   }
558:   #endif
559:   if (timestepping) {
560:     PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"timestepping",PETSC_BOOL,&timestepping);
561:   }

563:   /* Close/release resources */
564:   if (group != file_id) {
565:     PetscStackCallHDF5(H5Gclose,(group));
566:   }
567:   PetscStackCallHDF5(H5Sclose,(filespace));
568:   PetscStackCallHDF5(H5Sclose,(memspace));
569:   PetscStackCallHDF5(H5Dclose,(dset_id));
570:   PetscInfo(xin,"Wrote Vec object with name %s\n",vecname);
571:   return 0;
572: }
573: #endif
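
/*
   Usage sketch (file and dataset names are illustrative): writing a DMDA
   vector through an HDF5 viewer reaches VecView_MPI_HDF5_DA() above; the
   vector's name becomes the HDF5 dataset name.

     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"solution.h5",FILE_MODE_WRITE,&viewer);
     PetscObjectSetName((PetscObject)x,"pressure");
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/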

575: extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

577: #if defined(PETSC_HAVE_MPIIO)
578: static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
579: {
580:   MPI_File          mfdes;
581:   PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
582:   MPI_Datatype      view;
583:   const PetscScalar *array;
584:   MPI_Offset        off;
585:   MPI_Aint          ub,ul;
586:   PetscInt          type,rows,vecrows,tr[2];
587:   DM_DA             *dd = (DM_DA*)da->data;
588:   PetscBool         skipheader;

590:   VecGetSize(xin,&vecrows);
591:   PetscViewerBinaryGetSkipHeader(viewer,&skipheader);
592:   if (!write) {
593:     /* Read vector header. */
 594:     if (!skipheader) {
 595:       PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
 596:       type = tr[0];
 597:       rows = tr[1];
 598:       if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
 599:       if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
 600:     }
601:   } else {
602:     tr[0] = VEC_FILE_CLASSID;
603:     tr[1] = vecrows;
604:     if (!skipheader) {
605:       PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT);
606:     }
607:   }

609:   PetscMPIIntCast(dd->w,&dof);
610:   gsizes[0]  = dof;
611:   PetscMPIIntCast(dd->M,gsizes+1);
612:   PetscMPIIntCast(dd->N,gsizes+2);
613:   PetscMPIIntCast(dd->P,gsizes+3);
614:   lsizes[0]  = dof;
615:   PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);
616:   PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);
617:   PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);
618:   lstarts[0] = 0;
619:   PetscMPIIntCast(dd->xs/dof,lstarts+1);
620:   PetscMPIIntCast(dd->ys,lstarts+2);
621:   PetscMPIIntCast(dd->zs,lstarts+3);
622:   MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
623:   MPI_Type_commit(&view);

625:   PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
626:   PetscViewerBinaryGetMPIIOOffset(viewer,&off);
627:   MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);
628:   VecGetArrayRead(xin,&array);
629:   asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
630:   if (write) {
631:     MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
632:   } else {
633:     MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
634:   }
635:   MPI_Type_get_extent(view,&ul,&ub);
636:   PetscViewerBinaryAddMPIIOOffset(viewer,ub);
637:   VecRestoreArrayRead(xin,&array);
638:   MPI_Type_free(&view);
639:   return 0;
640: }
641: #endif
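
/*
   The MPI-IO path above hinges on MPI_Type_create_subarray(): each rank
   describes where its local (dof,x,y,z) block sits inside the global
   Fortran-ordered array, and one collective read/write then moves the whole
   vector. The same idea in isolation (sizes are illustrative):

     int          gsizes[2] = {8,8}, lsizes[2] = {8,4}, lstarts[2] = {0,4};
     MPI_Datatype view;
     MPI_Type_create_subarray(2,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPI_DOUBLE,&view);
     MPI_Type_commit(&view);
     // ... MPI_File_set_view(fh,0,MPI_DOUBLE,view,"native",MPI_INFO_NULL); ...
     MPI_Type_free(&view);
*/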

643: PetscErrorCode  VecView_MPI_DA(Vec xin,PetscViewer viewer)
644: {
645:   DM                da;
646:   PetscInt          dim;
647:   Vec               natural;
648:   PetscBool         isdraw,isvtk,isglvis;
649: #if defined(PETSC_HAVE_HDF5)
650:   PetscBool         ishdf5;
651: #endif
652:   const char        *prefix,*name;
653:   PetscViewerFormat format;

655:   VecGetDM(xin,&da);
657:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
658:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
659: #if defined(PETSC_HAVE_HDF5)
660:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
661: #endif
662:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
663:   if (isdraw) {
664:     DMDAGetInfo(da,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
665:     if (dim == 1) {
666:       VecView_MPI_Draw_DA1d(xin,viewer);
667:     } else if (dim == 2) {
668:       VecView_MPI_Draw_DA2d(xin,viewer);
 669:     } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with a DMDA of dimension %D",dim);
670:   } else if (isvtk) {           /* Duplicate the Vec */
671:     Vec Y;
672:     VecDuplicate(xin,&Y);
673:     if (((PetscObject)xin)->name) {
674:       /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
675:       PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);
676:     }
677:     VecCopy(xin,Y);
678:     {
679:       PetscObject dmvtk;
680:       PetscBool   compatible,compatibleSet;
681:       PetscViewerVTKGetDM(viewer,&dmvtk);
 682:       if (dmvtk) {
 684:         DMGetCompatibility(da,(DM)dmvtk,&compatible,&compatibleSet);
 685:         if (!compatibleSet || !compatible) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
 686:       }
687:       PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_DEFAULT,PETSC_VTK_POINT_FIELD,PETSC_FALSE,(PetscObject)Y);
688:     }
689: #if defined(PETSC_HAVE_HDF5)
690:   } else if (ishdf5) {
691:     VecView_MPI_HDF5_DA(xin,viewer);
692: #endif
693:   } else if (isglvis) {
694:     VecView_GLVis(xin,viewer);
695:   } else {
696: #if defined(PETSC_HAVE_MPIIO)
697:     PetscBool isbinary,isMPIIO;

699:     PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
700:     if (isbinary) {
701:       PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
702:       if (isMPIIO) {
703:         DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);
704:         return 0;
705:       }
706:     }
707: #endif

709:     /* call viewer on natural ordering */
710:     PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
711:     DMDACreateNaturalVector(da,&natural);
712:     PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
713:     DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);
714:     DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);
715:     PetscObjectGetName((PetscObject)xin,&name);
716:     PetscObjectSetName((PetscObject)natural,name);

718:     PetscViewerGetFormat(viewer,&format);
719:     if (format == PETSC_VIEWER_BINARY_MATLAB) {
720:       /* temporarily remove viewer format so it won't trigger in the VecView() */
721:       PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT);
722:     }

724:     ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
725:     VecView(natural,viewer);
726:     ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

728:     if (format == PETSC_VIEWER_BINARY_MATLAB) {
729:       MPI_Comm    comm;
730:       FILE        *info;
731:       const char  *fieldname;
732:       char        fieldbuf[256];
733:       PetscInt    dim,ni,nj,nk,pi,pj,pk,dof,n;

735:       /* set the viewer format back into the viewer */
736:       PetscViewerPopFormat(viewer);
737:       PetscObjectGetComm((PetscObject)viewer,&comm);
738:       PetscViewerBinaryGetInfoPointer(viewer,&info);
739:       DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,NULL,NULL,NULL,NULL,NULL);
740:       PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
741:       PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");
742:       if (dim == 1) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni); }
743:       if (dim == 2) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj); }
744:       if (dim == 3) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk); }

746:       for (n=0; n<dof; n++) {
747:         DMDAGetFieldName(da,n,&fieldname);
748:         if (!fieldname || !fieldname[0]) {
749:           PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);
750:           fieldname = fieldbuf;
751:         }
752:         if (dim == 1) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1); }
753:         if (dim == 2) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1); }
754:         if (dim == 3) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1);}
755:       }
756:       PetscFPrintf(comm,info,"#$$ clear tmp; \n");
757:       PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
758:     }

760:     VecDestroy(&natural);
761:   }
762:   return 0;
763: }
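
/*
   A sketch of consuming the MATLAB block written above (file name is
   illustrative): the "#$$" lines are written to the .info file that accompanies
   the binary file, and the PetscReadBinaryMatlab script shipped in
   $PETSC_DIR/share/petsc/matlab executes them, so from MATLAB:

     Set = PetscReadBinaryMatlab('solution.dat');
     surf(Set.pressure.field0);

   Here "pressure" is the vector's name and "field0" the default field name
   used when DMDAGetFieldName() provides none.
*/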

765: #if defined(PETSC_HAVE_HDF5)
766: PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
767: {
768:   PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5*) viewer->data;
769:   DM             da;
770:   int            dim,rdim;
771:   hsize_t        dims[6]={0},count[6]={0},offset[6]={0};
772:   PetscBool      dim2=PETSC_FALSE,timestepping=PETSC_FALSE;
773:   PetscInt       dimension,timestep=PETSC_MIN_INT,dofInd;
774:   PetscScalar    *x;
775:   const char     *vecname;
776:   hid_t          filespace; /* file dataspace identifier */
777:   hid_t          dset_id;   /* dataset identifier */
778:   hid_t          memspace;  /* memory dataspace identifier */
779:   hid_t          file_id,group;
780:   hid_t          scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
781:   DM_DA          *dd;

783: #if defined(PETSC_USE_REAL_SINGLE)
784:   scalartype = H5T_NATIVE_FLOAT;
785: #elif defined(PETSC_USE_REAL___FLOAT128)
786: #error "HDF5 output with 128 bit floats not supported."
787: #elif defined(PETSC_USE_REAL___FP16)
788: #error "HDF5 output with 16 bit floats not supported."
789: #else
790:   scalartype = H5T_NATIVE_DOUBLE;
791: #endif

793:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
794:   PetscObjectGetName((PetscObject)xin,&vecname);
795:   PetscViewerHDF5CheckTimestepping_Internal(viewer, vecname);
796:   PetscViewerHDF5IsTimestepping(viewer, &timestepping);
797:   if (timestepping) {
798:     PetscViewerHDF5GetTimestep(viewer, &timestep);
799:   }
800:   VecGetDM(xin,&da);
801:   dd   = (DM_DA*)da->data;
802:   DMGetDimension(da, &dimension);

804:   /* Open dataset */
805:   PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));

807:   /* Retrieve the dataspace for the dataset */
808:   PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
809:   PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));

 811:   /* Expected dimension for holding the dofs */
812: #if defined(PETSC_USE_COMPLEX)
813:   dofInd = rdim-2;
814: #else
815:   dofInd = rdim-1;
816: #endif

818:   /* The expected number of dimensions, assuming basedimension2 = false */
819:   dim = dimension;
820:   if (dd->w > 1) ++dim;
821:   if (timestep >= 0) ++dim;
822: #if defined(PETSC_USE_COMPLEX)
823:   ++dim;
824: #endif

826:   /* In this case the input dataset have one extra, unexpected dimension. */
827:   if (rdim == dim+1) {
828:     /* In this case the block size unity */
829:     if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;

831:     /* Special error message for the case where dof does not match the input file */

834:   /* Other cases where rdim != dim cannot be handled currently */

837:   /* Set up the hyperslab size */
838:   dim = 0;
839:   if (timestep >= 0) {
840:     offset[dim] = timestep;
841:     count[dim] = 1;
842:     ++dim;
843:   }
844:   if (dimension == 3) {
845:     PetscHDF5IntCast(dd->zs,offset + dim);
846:     PetscHDF5IntCast(dd->ze - dd->zs,count + dim);
847:     ++dim;
848:   }
849:   if (dimension > 1) {
850:     PetscHDF5IntCast(dd->ys,offset + dim);
851:     PetscHDF5IntCast(dd->ye - dd->ys,count + dim);
852:     ++dim;
853:   }
854:   PetscHDF5IntCast(dd->xs/dd->w,offset + dim);
855:   PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);
856:   ++dim;
857:   if (dd->w > 1 || dim2) {
858:     offset[dim] = 0;
859:     PetscHDF5IntCast(dd->w,count + dim);
860:     ++dim;
861:   }
862: #if defined(PETSC_USE_COMPLEX)
863:   offset[dim] = 0;
864:   count[dim] = 2;
865:   ++dim;
866: #endif

868:   /* Create the memory and filespace */
869:   PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
870:   PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

872:   VecGetArray(xin, &x);
873:   PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x));
874:   VecRestoreArray(xin, &x);

876:   /* Close/release resources */
877:   if (group != file_id) {
878:     PetscStackCallHDF5(H5Gclose,(group));
879:   }
880:   PetscStackCallHDF5(H5Sclose,(filespace));
881:   PetscStackCallHDF5(H5Sclose,(memspace));
882:   PetscStackCallHDF5(H5Dclose,(dset_id));
883:   return 0;
884: }
885: #endif
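
/*
   A minimal loading sketch mirroring the writer above (file and dataset names
   are illustrative): the dataset is located by the vector's name, so set the
   name before calling VecLoad().

     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"solution.h5",FILE_MODE_READ,&viewer);
     PetscObjectSetName((PetscObject)x,"pressure");
     VecLoad(x,viewer);
     PetscViewerDestroy(&viewer);
*/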

887: PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
888: {
889:   DM             da;
890:   Vec            natural;
891:   const char     *prefix;
892:   PetscInt       bs;
893:   PetscBool      flag;
894:   DM_DA          *dd;
895: #if defined(PETSC_HAVE_MPIIO)
896:   PetscBool isMPIIO;
897: #endif

899:   VecGetDM(xin,&da);
900:   dd   = (DM_DA*)da->data;
901: #if defined(PETSC_HAVE_MPIIO)
902:   PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
903:   if (isMPIIO) {
904:     DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);
905:     return 0;
906:   }
907: #endif

909:   PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
910:   DMDACreateNaturalVector(da,&natural);
911:   PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);
912:   PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
913:   VecLoad(natural,viewer);
914:   DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);
915:   DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);
916:   VecDestroy(&natural);
917:   PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");
918:   PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);
919:   if (flag && bs != dd->w) {
920:     PetscInfo(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);
921:   }
922:   return 0;
923: }

925: PetscErrorCode  VecLoad_Default_DA(Vec xin, PetscViewer viewer)
926: {
927:   DM             da;
928:   PetscBool      isbinary;
929: #if defined(PETSC_HAVE_HDF5)
930:   PetscBool ishdf5;
931: #endif

933:   VecGetDM(xin,&da);

936: #if defined(PETSC_HAVE_HDF5)
937:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
938: #endif
939:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);

941:   if (isbinary) {
942:     VecLoad_Binary_DA(xin,viewer);
943: #if defined(PETSC_HAVE_HDF5)
944:   } else if (ishdf5) {
945:     VecLoad_HDF5_DA(xin,viewer);
946: #endif
947:   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
948:   return 0;
949: }