Actual source code: ex83.c


static char help[] = "Partition a tiny grid using hierarchical partitioning and increase the overlap using MatIncreaseOverlapSplit.\n\n";

/*T
   Concepts: partitioning
   Processors: 4
T*/

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             A,B;
  PetscMPIInt     rank,size,membershipKey;
  PetscInt        *ia,*ja,*indices_sc,isrows_localsize;
  const PetscInt  *indices;
  MatPartitioning part;
  IS              is,isrows,isrows_sc;
  IS              coarseparts,fineparts;
  MPI_Comm        comm,scomm;

  PetscInitialize(&argc,&args,(char*)0,help);
  comm = PETSC_COMM_WORLD;
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  /* set up the local piece of the adjacency graph */
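  /* The graph is a 4 x 4 grid with 16 vertices; each of the 4 ranks owns 4
     consecutive rows.  ia[] holds the CSR row offsets of the local rows and
     ja[] the global indices of each vertex's grid neighbors. */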
  PetscMalloc1(5,&ia);
  PetscMalloc1(16,&ja);
  if (rank == 0) {
    ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6;
    ja[8] = 2; ja[9] = 7;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 0;
  } else if (rank == 1) {
    ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2;
    ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 0;
  } else if (rank == 2) {
    ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6;
    ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 1;
  } else {
    ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15;
    ja[8] = 11; ja[9] = 14;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 1;
  }
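  /* MATMPIADJ takes ownership of ia and ja: PETSc frees them when A is
     destroyed, which is why there is no matching PetscFree() below */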
  MatCreateMPIAdj(comm,4,16,ia,ja,NULL,&A);
  MatView(A,PETSC_VIEWER_STDOUT_WORLD);
  /*
    Partition the graph of the matrix
  */
  MatPartitioningCreate(comm,&part);
  MatPartitioningSetAdjacency(part,A);
  MatPartitioningSetType(part,MATPARTITIONINGHIERARCH);
  MatPartitioningHierarchicalSetNcoarseparts(part,2);
  MatPartitioningHierarchicalSetNfineparts(part,2);
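  /* 2 coarse parts, each split into 2 fine parts: 4 parts in total, one per
     process */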
  MatPartitioningSetFromOptions(part);
  /* get the new owning process of each vertex */
  MatPartitioningApply(part,&is);
  /* coarse parts */
  MatPartitioningHierarchicalGetCoarseparts(part,&coarseparts);
  ISView(coarseparts,PETSC_VIEWER_STDOUT_WORLD);
  /* fine parts */
  MatPartitioningHierarchicalGetFineparts(part,&fineparts);
  ISView(fineparts,PETSC_VIEWER_STDOUT_WORLD);
  /* full partitioning */
  ISView(is,PETSC_VIEWER_STDOUT_WORLD);
  /* compute the rows that will live on this process after repartitioning */
  ISBuildTwoSided(is,NULL,&isrows);
  ISView(isrows,PETSC_VIEWER_STDOUT_WORLD);
  /* create a sub-communicator: ranks with the same membershipKey end up together */
  MPI_Comm_split(comm,membershipKey,rank,&scomm);
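  /* ranks 0-1 (membershipKey 0) and ranks 2-3 (membershipKey 1) form two
     2-process sub-communicators, matching the two coarse parts */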
  ISGetLocalSize(isrows,&isrows_localsize);
  PetscMalloc1(isrows_localsize,&indices_sc);
  ISGetIndices(isrows,&indices);
  PetscArraycpy(indices_sc,indices,isrows_localsize);
  ISRestoreIndices(isrows,&indices);
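  /* the indices are copied because isrows lives on the world communicator,
     while the IS created below lives on scomm and takes ownership of its
     array via PETSC_OWN_POINTER */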
  ISDestroy(&is);
  ISDestroy(&coarseparts);
  ISDestroy(&fineparts);
  ISDestroy(&isrows);
  MatPartitioningDestroy(&part);
  /* create a sub-IS on the sub-communicator */
  ISCreateGeneral(scomm,isrows_localsize,indices_sc,PETSC_OWN_POINTER,&isrows_sc);
  MatConvert(A,MATMPIAIJ,MAT_INITIAL_MATRIX,&B);
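  /* the adjacency graph is converted to a full MPIAIJ matrix, the assembled
     format that MatIncreaseOverlapSplit() below works with */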
  MatView(B,PETSC_VIEWER_STDOUT_WORLD);
  /* increase the overlap of the sub-domain by one layer */
  MatIncreaseOverlapSplit(B,1,&isrows_sc,1);
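  /* isrows_sc (still defined on scomm) now also contains the rows within
     graph distance 1 of the original sub-domain */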
  ISView(isrows_sc,NULL);
  ISDestroy(&isrows_sc);
  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  MatDestroy(&A);
  MatDestroy(&B);
  MPI_Comm_free(&scomm);
  PetscFinalize();
  return 0;
}
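
To build and run this example on 4 MPI processes (a minimal sketch; "make ex83"
assumes the usual PETSc example makefile, and the executable name is an
assumption):

  make ex83
  mpiexec -n 4 ./ex83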