Actual source code: ex6.c

static char help[] = "  Test VecScatter with x, y on different communicators\n\n";

#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscInt           i, n = 5, rstart;
  PetscScalar       *val;
  const PetscScalar *dat;
  Vec                x, y1, y2;
  MPI_Comm           newcomm;
  PetscMPIInt        nproc, rank;
  IS                 ix;
  VecScatter         vscat1, vscat2;

  PetscInitialize(&argc, &argv, (char *)0, help);
  MPI_Comm_size(PETSC_COMM_WORLD, &nproc);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

  /* Create MPI vectors x and y1 on the same communicator (i.e., MPI_IDENT) */
  VecCreateMPI(PETSC_COMM_WORLD, n, PETSC_DECIDE, &x);
  VecDuplicate(x, &y1);
  VecGetOwnershipRange(x, &rstart, NULL);

  /* Set x's values locally; with the two ranks of the test below, x = {0., 1., 2., ..., 9.} */
  VecGetArray(x, &val);
  for (i = 0; i < n; i++) val[i] = rstart + i;
  VecRestoreArray(x, &val);
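  /* Note: VecGetArray() exposes only the locally owned block, so local index i corresponds to
     global index rstart+i; each entry of x therefore equals its own global index */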

  /* Create index set ix = {0, 1, 2, ..., 9} */
  ISCreateStride(PETSC_COMM_WORLD, n, rstart, 1, &ix);
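  /* Each rank contributes its own contiguous stride {rstart, ..., rstart+n-1}, so the ranks
     together index every global entry of x exactly once */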

  /* Create newcomm, which reverses the process order of x's communicator, then create y2 on it */
  MPI_Comm_split(PETSC_COMM_WORLD, 0 /*color*/, -rank /*key*/, &newcomm);
  VecCreateMPI(newcomm, n, PETSC_DECIDE, &y2);
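  /* With a single color, MPI_Comm_split() keeps all processes in one communicator and orders
     them by ascending key; key = -rank therefore puts world rank nproc-1 at rank 0 of newcomm */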

  /* vscat1 and vscat2 look identical, but they are not: y2 lives on a different communicator than x */
  VecScatterCreate(x, ix, y1, ix, &vscat1);
  VecScatterCreate(x, ix, y2, ix, &vscat2);
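  /* Both scatters map global index i of the source to global index i of the destination. Because
     newcomm reverses the rank order, global index i of y2 lives on a different process than it
     does in x, so vscat2 must move data across processes while vscat1 is a purely local copy */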

  VecScatterBegin(vscat1, x, y1, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterBegin(vscat2, x, y2, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(vscat1, x, y1, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(vscat2, x, y2, INSERT_VALUES, SCATTER_FORWARD);
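  /* Posting both Begins before either End lets the communication of the two scatters overlap */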

  /* View on rank 0 of x's communicator, which is PETSC_COMM_WORLD */
  if (rank == 0) {
    /* Print the part of x on rank 0, which is 0 1 2 3 4 */
    PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, x  = ");
    VecGetArrayRead(x, &dat);
    for (i = 0; i < n; i++) PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i]));
    VecRestoreArrayRead(x, &dat);

    /* Print the part of y1 on rank 0, which is 0 1 2 3 4 */
    PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, y1 = ");
    VecGetArrayRead(y1, &dat);
    for (i = 0; i < n; i++) PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i]));
    VecRestoreArrayRead(y1, &dat);

    /* Print the part of y2 on rank 0, which is 5 6 7 8 9 since y2's communicator reverses the
       process order of PETSC_COMM_WORLD */
    PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, y2 = ");
    VecGetArrayRead(y2, &dat);
    for (i = 0; i < n; i++) PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i]));
    VecRestoreArrayRead(y2, &dat);
    PetscPrintf(PETSC_COMM_SELF, "\n");
  }
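  /* VecGetArrayRead()/VecRestoreArrayRead() are not collective, so it is safe to call them on
     rank 0 alone inside the block above */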

  ISDestroy(&ix);
  VecDestroy(&x);
  VecDestroy(&y1);
  VecDestroy(&y2);
  VecScatterDestroy(&vscat1);
  VecScatterDestroy(&vscat2);
  MPI_Comm_free(&newcomm);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 2
      requires: double
TEST*/
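
For reference, a minimal sketch of running the example by hand, mirroring the "nsize: 2" test
spec above (the executable name ex6 and a standard MPI launcher are assumptions):

   mpiexec -n 2 ./ex6

With two ranks holding five entries each, the comments in the code predict output along the
lines of:

   On rank 0 of PETSC_COMM_WORLD, x  =  0 1 2 3 4
   On rank 0 of PETSC_COMM_WORLD, y1 =  0 1 2 3 4
   On rank 0 of PETSC_COMM_WORLD, y2 =  5 6 7 8 9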