Actual source code: ex8.c
static char help[] = "Demonstrates using a local ordering to set values into a parallel vector.\n\n";

/*T
   Concepts: vectors^assembling vectors with local ordering;
   Processors: n
T*/

/*
  Include "petscvec.h" so that we can use vectors. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscMPIInt rank;
  PetscInt    i,ng,*gindices,rstart,rend,M;
  PetscScalar one = 1.0;
  Vec         x;

  PetscInitialize(&argc,&argv,(char*)0,help);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion, and PETSc computes the global size. Alternatively,
        PETSc could determine the vector's distribution if we specified
        just the global size.
  */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,rank+1,PETSC_DECIDE);
  VecSetFromOptions(x);
  VecSet(x,one);

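  /*
     For example, since each rank requests rank+1 local entries, a run on
     3 processes yields local sizes 1, 2, and 3, and PETSc computes the
     global size as M = 6.
  */
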
  /*
     Set the local-to-global ordering for the vector. Each processor
     generates a list of the global indices for each local index. Note that
     the local indices are just whatever is convenient for a particular
     application. In this case we treat the vector as lying on a
     one-dimensional grid and have one ghost point on each end of the
     blocks owned by each processor.
  */

  VecGetSize(x,&M);
  VecGetOwnershipRange(x,&rstart,&rend);
  ng = rend - rstart + 2;
  PetscMalloc1(ng,&gindices);
  gindices[0] = rstart - 1;
  for (i=0; i<ng-1; i++) gindices[i+1] = gindices[i] + 1;
  /* map the first and last points as periodic */
  if (gindices[0] == -1) gindices[0] = M - 1;
  if (gindices[ng-1] == M) gindices[ng-1] = 0;
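  /*
     For example, with 3 processes (M = 6): rank 0 owns [0,1) and builds
     gindices = {5,0,1} (its left ghost wraps to M-1); rank 1 owns [1,3)
     and builds gindices = {0,1,2,3}; rank 2 owns [3,6) and builds
     gindices = {2,3,4,5,0} (its right ghost wraps to 0).
  */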
  {
    ISLocalToGlobalMapping ltog;
    ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,ng,gindices,PETSC_COPY_VALUES,&ltog);
    VecSetLocalToGlobalMapping(x,ltog);
    ISLocalToGlobalMappingDestroy(&ltog);
  }
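  /*
     The vector keeps its own reference to the mapping, so our reference
     can be destroyed here; likewise PETSC_COPY_VALUES made the mapping
     copy gindices, so the array can be freed below.
  */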
  PetscFree(gindices);

  /*
     Set the vector elements.
      - In this case, set the values using the local ordering.
      - Each processor can contribute any vector entries,
        regardless of which processor "owns" them; any nonlocal
        contributions will be transferred to the appropriate processor
        during the assembly process.
      - In this example, the flag ADD_VALUES indicates that all
        contributions will be added together.
  */
  for (i=0; i<ng; i++) {
    VecSetValuesLocal(x,1,&i,&one,ADD_VALUES);
  }

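  /*
     Local indices 0..ng-1 map to global indices rstart-1..rend (with the
     periodic wrap), so each process adds 1.0 to every entry it owns and
     to one ghost entry on each side of its block.
  */
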
  /*
     Assemble the vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  VecAssemblyBegin(x);
  VecAssemblyEnd(x);

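  /*
     On top of the initial value of 1.0, each entry has now received one
     contribution from its owner plus one from every neighboring process
     that carries it as a ghost: with more than one process, block-interior
     entries end up at 2.0 and block-boundary entries at 3.0 (4.0 for a
     size-one block, whose single entry is both boundaries).
  */
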
  /*
     View the vector; then destroy it.
  */
  VecView(x,PETSC_VIEWER_STDOUT_WORLD);
  VecDestroy(&x);

  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 4

TEST*/
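/* The TEST block above drives the PETSc test harness; nsize: 4 runs this
   example on 4 MPI processes. */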