
Sending a 2D array in MPI


http://www.linuxforums.org – Hi, I have a problem with sending and receiving a 2D array using MPI_Send and MPI_Recv. I've declared a struct type Pixel and a 2D array of that type. Now I want to send an equal part of the array to every processor. Here is the code:

Code:
#include <stdio.h>
#include <stdlib.h>
#include <stddef.h>
#include "mpi.h"

#define N 8
#define M 6
#define LEN 5
#define CEN 3

typedef struct {
        float data[LEN];
        int x, y;
        int class;
} Pixel;

int main(int argc, char *argv[])
{
        int rank, numtasks, tag1, tag2, rc, dest, source = 0;
        int chunksize, i, j, k, offset;
        Pixel pixels[N][M];
        Pixel centroids[CEN];   /* unused in this excerpt */
        Pixel *p;               /* unused in this excerpt */
        MPI_Status status;
        MPI_Datatype pixel_type, tmp_type, oldtypes[2];
        int blockcounts[2];
        MPI_Aint offsets[2];

        if ((rc = MPI_Init(&argc, &argv)) != MPI_SUCCESS) {
                printf("Cannot initialize MPI\n");
                MPI_Abort(MPI_COMM_WORLD, rc);
        }
        MPI_Comm_size(MPI_COMM_WORLD, &numtasks);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        if (N % numtasks != 0) {
                printf("Number of rows (N) must be divisible by the number of tasks.\n");
                MPI_Abort(MPI_COMM_WORLD, 1);
        }
        chunksize = N / numtasks;   /* rows per task: the split is over the N rows, not the M columns */
        tag1 = 1;
        tag2 = 2;

        /* Describe the MPI_FLOAT field: data[LEN] */
        offsets[0] = offsetof(Pixel, data);
        oldtypes[0] = MPI_FLOAT;
        blockcounts[0] = LEN;

        /* Describe the MPI_INT fields: x, y, class */
        offsets[1] = offsetof(Pixel, x);
        oldtypes[1] = MPI_INT;
        blockcounts[1] = 3;

        /* Define the MPI struct type (MPI_Type_struct/MPI_Type_extent are
           deprecated; MPI_Type_create_struct is the current call), then
           resize its extent to sizeof(Pixel) in case the compiler pads
           the struct */
        MPI_Type_create_struct(2, blockcounts, offsets, oldtypes, &tmp_type);
        MPI_Type_create_resized(tmp_type, 0, sizeof(Pixel), &pixel_type);
        MPI_Type_commit(&pixel_type);

        /* Initialize the array on the root */
        if (rank == 0) {
                for (i = 0; i < N; i++) {
                        for (j = 0; j < M; j++) {
                                pixels[i][j].x = j;
                                pixels[i][j].y = i;
                                pixels[i][j].class = 0;
                                for (k = 0; k < LEN; k++)
                                        pixels[i][j].data[k] = (i + j) * k;
                        }
                }
                /* Send each proc its part of the array, one row at a time */
                offset = chunksize;
                for (dest = 1; dest < numtasks; dest++) {
                        MPI_Send(&offset, 1, MPI_INT, dest, tag1, MPI_COMM_WORLD);
                        for (i = offset; i < offset + chunksize; i++)
                                MPI_Send(&pixels[i][0], M, pixel_type, dest, tag2, MPI_COMM_WORLD);
                        offset += chunksize;
                }
        }

        if (rank > 0) {
                /* Every other proc receives its rows from the root */
                MPI_Recv(&offset, 1, MPI_INT, source, tag1, MPI_COMM_WORLD, &status);
                for (i = offset; i < offset + chunksize; i++)
                        MPI_Recv(&pixels[i][0], M, pixel_type, source, tag2, MPI_COMM_WORLD, &status);
        }

        MPI_Type_free(&pixel_type);
        MPI_Finalize();
        return 0;
}

I've googled this but couldn't find anything helpful, so I hope someone can help me here :)
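
One thing I wasn't sure about is whether the extent of the committed type really matches sizeof(Pixel): if the compiler pads the struct, every send with a count greater than 1 would step through memory at the wrong stride. Here is a minimal standalone check I put together (the Pixel layout is copied from above; the rest is just the type construction plus MPI_Type_get_extent, so treat it as a sketch rather than tested production code):

Code:
#include <stdio.h>
#include <stddef.h>
#include "mpi.h"

#define LEN 5

typedef struct {
        float data[LEN];
        int x, y;
        int class;
} Pixel;

int main(int argc, char *argv[])
{
        int blockcounts[2] = { LEN, 3 };
        MPI_Aint offsets[2] = { offsetof(Pixel, data), offsetof(Pixel, x) };
        MPI_Datatype oldtypes[2] = { MPI_FLOAT, MPI_INT };
        MPI_Datatype tmp_type, pixel_type;
        MPI_Aint lb, ext;

        MPI_Init(&argc, &argv);

        MPI_Type_create_struct(2, blockcounts, offsets, oldtypes, &tmp_type);
        MPI_Type_create_resized(tmp_type, 0, sizeof(Pixel), &pixel_type);
        MPI_Type_commit(&pixel_type);

        /* If these two numbers differed, sends with count > 1 would walk
           memory at the wrong stride without the resize above. */
        MPI_Type_get_extent(pixel_type, &lb, &ext);
        printf("type extent = %ld, sizeof(Pixel) = %zu\n", (long)ext, sizeof(Pixel));

        MPI_Type_free(&pixel_type);
        MPI_Finalize();
        return 0;
}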
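
I also wondered whether the per-rank send/receive loops could be replaced by a single collective, since every rank gets an equal, contiguous block of rows and pixels[N][M] is contiguous in row-major order. Here is a sketch of that idea with MPI_Scatter; it assumes numtasks divides N, and the oversized local buffer is just my simplification, so I'd appreciate confirmation that this is the right approach:

Code:
#include <stdio.h>
#include <stddef.h>
#include "mpi.h"

#define N 8
#define M 6
#define LEN 5

typedef struct {
        float data[LEN];
        int x, y;
        int class;
} Pixel;

int main(int argc, char *argv[])
{
        Pixel pixels[N][M];   /* only filled on rank 0 */
        Pixel local[N][M];    /* oversized for simplicity; rows*M would do */
        int rank, numtasks, i, j, k, rows;
        int blockcounts[2] = { LEN, 3 };
        MPI_Aint offsets[2] = { offsetof(Pixel, data), offsetof(Pixel, x) };
        MPI_Datatype oldtypes[2] = { MPI_FLOAT, MPI_INT };
        MPI_Datatype tmp_type, pixel_type;

        MPI_Init(&argc, &argv);
        MPI_Comm_size(MPI_COMM_WORLD, &numtasks);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        rows = N / numtasks;  /* assumes numtasks divides N */

        MPI_Type_create_struct(2, blockcounts, offsets, oldtypes, &tmp_type);
        MPI_Type_create_resized(tmp_type, 0, sizeof(Pixel), &pixel_type);
        MPI_Type_commit(&pixel_type);

        if (rank == 0)        /* fill the whole array on the root */
                for (i = 0; i < N; i++)
                        for (j = 0; j < M; j++) {
                                pixels[i][j].x = j;
                                pixels[i][j].y = i;
                                pixels[i][j].class = 0;
                                for (k = 0; k < LEN; k++)
                                        pixels[i][j].data[k] = (i + j) * k;
                        }

        /* one collective replaces the per-rank send/recv loops;
           rank 0 keeps its own block in local as well */
        MPI_Scatter(&pixels[0][0], rows * M, pixel_type,
                    &local[0][0], rows * M, pixel_type,
                    0, MPI_COMM_WORLD);

        printf("rank %d got rows %d..%d, first x=%d\n",
               rank, rank * rows, (rank + 1) * rows - 1, local[0][0].x);

        MPI_Type_free(&pixel_type);
        MPI_Finalize();
        return 0;
}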