Actual source code: ex14.c

petsc-3.15.0 2021-03-30

static char help[] = "Test event log of VecScatter with various block sizes\n\n";

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscErrorCode     ierr;
  PetscInt           i,j,low,high,n=256,N,errors,tot_errors;
  PetscInt           bs=1,ix[2],iy[2];
  PetscMPIInt        nproc,rank;
  PetscScalar       *xval;
  const PetscScalar *yval;
  Vec                x,y;
  IS                 isx,isy;
  VecScatter         ctx;
  const PetscInt     niter = 10;
#if defined(PETSC_USE_LOG)
  PetscLogStage      stage1,stage2;
  PetscLogEvent      event1,event2;
  PetscLogDouble     numMessages,messageLength;
  PetscEventPerfInfo eventInfo;
  PetscInt           tot_msg,tot_len,avg_len;
#endif

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  PetscLogDefaultBegin();
  MPI_Comm_size(PETSC_COMM_WORLD,&nproc);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  PetscLogStageRegister("Scatter(bs=1)", &stage1);
  PetscLogEventRegister("VecScatter(bs=1)", PETSC_OBJECT_CLASSID, &event1);
  PetscLogStageRegister("Scatter(bs=4)", &stage2);
  PetscLogEventRegister("VecScatter(bs=4)", PETSC_OBJECT_CLASSID, &event2);

  /* Create a parallel vector x and a sequential vector y */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,n,PETSC_DECIDE);
  VecSetFromOptions(x);
  VecGetOwnershipRange(x,&low,&high);
  VecGetSize(x,&N);
  VecCreateSeq(PETSC_COMM_SELF,n,&y);

  /*=======================================
     test VecScatter with bs = 1
    ======================================*/

  /* the code works as if we are going to do 3-point stencil computations on a 1D domain x,
     which has periodic boundary conditions, but the two ghosts are scattered to the beginning of y.
   */
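  /* For illustration (assuming the configuration from the TEST block below: 4 ranks, n = 256, so N = 1024):
     rank 0 owns [0,256) and takes ghosts ix = {1023, 256}, rank 1 owns [256,512) and takes ix = {255, 512},
     and the last rank wraps around periodically with ix = {767, 0}. */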
  bs    = 1;
  ix[0] = rank? low-1 : N-1; /* ix[] contains global indices of the two ghost points */
  ix[1] = (rank != nproc-1)? high : 0;
  iy[0] = 0;
  iy[1] = 1;

  ISCreateGeneral(PETSC_COMM_SELF,2,ix,PETSC_COPY_VALUES,&isx);
  ISCreateGeneral(PETSC_COMM_SELF,2,iy,PETSC_COPY_VALUES,&isy);
  VecScatterCreate(x,isx,y,isy,&ctx);
  VecScatterSetUp(ctx);
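  /* As in the bs = 4 case below, SetUp is called explicitly here so that any messages sent during
     an implicit setup are not counted in the event logged next. */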

  PetscLogStagePush(stage1);
  PetscLogEventBegin(event1,0,0,0,0);
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x,&xval);
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    VecRestoreArray(x,&xval);
    /* scatter the ghosts to y */
    VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    /* check if y has correct values */
    VecGetArrayRead(y,&yval);
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0]+i) errors++;
    if ((PetscInt)PetscRealPart(yval[1]) != ix[1]+i) errors++;
    VecRestoreArrayRead(y,&yval);
  }
  PetscLogEventEnd(event1,0,0,0,0);
  PetscLogStagePop();

  /* check if we found wrong values on any process */
  MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  if (tot_errors) { PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %D\n",bs); }

  /* print out the event log of VecScatter(bs=1) */
#if defined(PETSC_USE_LOG)
  PetscLogEventGetPerfInfo(stage1,event1,&eventInfo);
  MPI_Allreduce(&eventInfo.numMessages,  &numMessages,  1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  tot_msg = (PetscInt)numMessages*0.5; /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)messageLength*0.5;
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
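  /* For example (assuming the 4-rank run from the TEST block below and real double-precision scalars):
     tot_msg = 2*4*10 = 80 and tot_len = 80*8*1 = 640 bytes for this bs = 1 stage. */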
  PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%D) has sent out %D messages, total %D bytes, with average length %D bytes\n",bs,tot_msg,tot_len,avg_len);
#endif

  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);

  /*=======================================
     test VecScatter with bs = 4
    ======================================*/

  /* similar to the 3-point stencil above, except that this time a ghost is a block */
  bs    = 4; /* n needs to be a multiple of bs to make the following code work */
  ix[0] = rank? low/bs-1 : N/bs-1; /* ix[] contains global indices of the two ghost blocks */
  ix[1] = (rank != nproc-1)? high/bs : 0;
  iy[0] = 0;
  iy[1] = 1;
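  /* For illustration (again assuming 4 ranks and n = 256): with bs = 4 there are N/bs = 256 blocks in total,
     so rank 0 takes ghost blocks ix = {255, 64}, rank 1 takes ix = {63, 128}, and the last rank takes ix = {191, 0}.
     A block index b in an ISCreateBlock index set covers vector entries b*bs, ..., b*bs+bs-1. */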

  ISCreateBlock(PETSC_COMM_SELF,bs,2,ix,PETSC_COPY_VALUES,&isx);
  ISCreateBlock(PETSC_COMM_SELF,bs,2,iy,PETSC_COPY_VALUES,&isy);

  VecScatterCreate(x,isx,y,isy,&ctx);
  /* Call SetUp explicitly; otherwise messages sent in the implicit SetUp would be counted in the events below */
  VecScatterSetUp(ctx);

  PetscLogStagePush(stage2);
  PetscLogEventBegin(event2,0,0,0,0);
  errors = 0;
  for (i=0; i<niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x,&xval);
    for (j=0; j<n; j++) xval[j] = (PetscScalar)(low+j+i);
    VecRestoreArray(x,&xval);
    /* scatter the ghost blocks to y */
    VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);
    /* check if y has correct values */
    VecGetArrayRead(y,&yval);
    if ((PetscInt)PetscRealPart(yval[0])  != ix[0]*bs+i) errors++;
    if ((PetscInt)PetscRealPart(yval[bs]) != ix[1]*bs+i) errors++;
    VecRestoreArrayRead(y,&yval);
  }
  PetscLogEventEnd(event2,0,0,0,0);
  PetscLogStagePop();

  /* check if we found wrong values on any process */
  MPI_Allreduce(&errors,&tot_errors,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  if (tot_errors) { PetscPrintf(PETSC_COMM_WORLD,"Error: wrong values were scattered in VecScatter with bs = %D\n",bs); }

  /* print out the event log of VecScatter(bs=4) */
#if defined(PETSC_USE_LOG)
  PetscLogEventGetPerfInfo(stage2,event2,&eventInfo);
  MPI_Allreduce(&eventInfo.numMessages,  &numMessages,  1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength,&messageLength,1,MPIU_PETSCLOGDOUBLE,MPI_SUM,PETSC_COMM_WORLD);
  tot_msg = (PetscInt)numMessages*0.5; /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)messageLength*0.5;
  avg_len = tot_msg? (PetscInt)(messageLength/numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
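  /* For example (same assumptions as the bs = 1 stage: 4 ranks, real double-precision scalars):
     tot_msg = 2*4*10 = 80 again, but tot_len = 80*8*4 = 2560 bytes since each message now carries a block. */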
  PetscPrintf(PETSC_COMM_WORLD,"VecScatter(bs=%D) has sent out %D messages, total %D bytes, with average length %D bytes\n",bs,tot_msg,tot_len,avg_len);
#endif

  PetscPrintf(PETSC_COMM_WORLD,"Program finished\n");
  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);
  VecDestroy(&x);
  VecDestroy(&y);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 4
      args:
      requires: double define(PETSC_USE_LOG)

TEST*/
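/* A typical manual run matching the TEST block above (assuming the example was built as ./ex14 in this directory):
     mpiexec -n 4 ./ex14
   With 4 ranks it should report 80 messages per stage, as estimated in the comments above. */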