Presentation is loading. Please wait.

Presentation is loading. Please wait.

Collective Communications Solution. #include #define N 300 int main(int argc, char **argv) { int i, target;/*local variables*/ int b[N], a[N/4];/*a is.

Similar presentations


Presentation on theme: "Collective Communications Solution. #include #define N 300 int main(int argc, char **argv) { int i, target;/*local variables*/ int b[N], a[N/4];/*a is."— Presentation transcript:

1 Collective Communications Solution

2 #include #define N 300 int main(int argc, char **argv) { int i, target;/*local variables*/ int b[N], a[N/4];/*a is name of the array each slave searches*/ int rank, size, err; MPI_Status status; int end_cnt; int gi;/*global index*/ float ave;/*average*/ FILE *sourceFile; FILE *destinationFile;

3 Solution int blocklengths[2] = {1, 1};/* initialize blocklengths array */ MPI_Datatype types[2] = {MPI_INT, MPI_FLOAT};/* initialize types array */ MPI_Aint displacements[2]; MPI_Datatype MPI_Pair; err = MPI_Init(&argc, &argv); err = MPI_Comm_rank(MPI_COMM_WORLD, &rank); err = MPI_Comm_size(MPI_COMM_WORLD, &size);

4 Solution /* Initialize displacements array with */ err = MPI_Address(&gi, &displacements[0]); /* memory addresses */ err = MPI_Address(&ave, &displacements[1]); /* This routine creates the new data type MPI_Pair */ err = MPI_Type_struct(2, blocklengths, displacements, types, &MPI_Pair); /* This routine allows it to be used in communication */ err = MPI_Type_commit(&MPI_Pair); if(size != 4) { printf("Error: You must use 4 processes to run this program.\n"); return 1; }

5 Solution if (rank == 0) { /* File b.data has the target value on the first line */ /* The remaining 300 lines of b.data have the values for the b array */ sourceFile = fopen("b.data", "r"); /* File found.data will contain the indices of b where the target is */ destinationFile = fopen("found.data", "w"); if(sourceFile==NULL) { printf("Error: can't access file.c.\n"); return 1; } else if(destinationFile==NULL) { printf("Error: can't create file for writing.\n"); return 1; } else { /* Read in the target */ fscanf(sourceFile, "%d", &target); }

6 Solution /*Notice the broadcast is outside of the if, all processors must call it*/ err = MPI_Bcast(&target, 1, MPI_INT, 0, MPI_COMM_WORLD); if (rank == 0) { /* Read in b array */ for (i=0; i<N; i++) { fscanf(sourceFile,"%d", &b[i]); } /* Again, the scatter is after the if, all processors must call it */ MPI_Scatter(b, N/size, MPI_INT, a, N/size, MPI_INT, 0, MPI_COMM_WORLD);

7 Solution if (rank == 0) { /*Master now searches the first fourth of the array for the target */ for (i=0; i<N/size; i++) { if (a[i] == target) { gi = (rank)*N/size+i+1; ave = (gi+target)/2.0; fprintf(destinationFile,"P %d, %d %f\n", rank, gi, ave); }

8 Solution end_cnt = 0; while (end_cnt != 3) { err = MPI_Recv(MPI_BOTTOM, 1, MPI_Pair, MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &status); if (status.MPI_TAG == 52) end_cnt++;/*See Comment*/ else fprintf(destinationFile,"P %d, %d %f\n", status.MPI_SOURCE, gi, ave); } fclose(sourceFile); fclose(destinationFile); }

9 Solution else { /* Search the b array and output the target locations */ for (i=0; i<N/size; i++) { if (a[i] == target) { gi = (rank)*N/size+i+1; /*Equation to convert local index to global index*/ ave = (gi+target)/2.0; err = MPI_Send(MPI_BOTTOM, 1, MPI_Pair, 0, 19, MPI_COMM_WORLD); } gi = target; /* Both are fake values */ ave=3.45; /* The point of this send is the "end" tag (See Chapter 4) */ err = MPI_Send(MPI_BOTTOM, 1, MPI_Pair, 0, 52, MPI_COMM_WORLD); /*See Comment*/ } err = MPI_Type_free(&MPI_Pair); err = MPI_Finalize(); return 0; }

10 Solution Note: The sections of code shown in red are new lines added to perform the broadcast of the target value. The sections of code shown in black are lines added to scatter the global array b among all four processors. Lastly, the section of code shown in blue is the new loop the master must execute to search its part (the first part) of the global array b.

11 Solution The results obtained from running this code are in the file "found.data" which contains the following: P 0, 62, 36. P 2, 183, 96.5 P 3, 271, 140.5 P 3, 291, 150.5 P 3, 296, 153. Notice that in this new parallel version, where all four processors search, the Master (P 0) finds the first location of the target, while processor 1 finds none.

12 END


Download ppt "Collective Communications Solution. #include #define N 300 int main(int argc, char **argv) { int i, target;/*local variables*/ int b[N], a[N/4];/*a is."

Similar presentations


Ads by Google