Commit dfc849f1 authored by David Goz's avatar David Goz 😴
Browse files

mpi/comp_comm/src/jacobi_2D_mpi_comp_comm.c: add MPI_Isendrecv variant (MPI-4) alongside the MPI_Irecv/MPI_Isend path for OpenMPI

parent f354993e
Loading
+43 −11
Original line number Diff line number Diff line
@@ -110,6 +110,15 @@ int main(int argc, char **argv)
      exit(EXIT_FAILURE);
    }

  if (rank == MASTERTASK)
    {
#if MPI_VERSION > 4
      printf("\n\t Using MPI_Isendrecv \n");
#else
      printf("\n\t Using MPI_Irecv / MPI_Isend \n");
#endif
    }

  /* get the comm size */
  MPI_Comm_size(ThisTask.comm2d, &ThisTask.nranks);
  
@@ -455,10 +464,13 @@ void Jacobi_Communication(MyData **const restrict Phi,
  const int data_row_size = ThisTask->domain.dim[Y];
  
  /* First task: issue the communication */
  MPI_Request request[4];

  MyData **const restrict buffer = Phi0;

#if MPI_VERSION > 4  

  MPI_Request request[4];

  MPI_Isendrecv(&buffer[ThisTask->domain.local_end[X]      ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    0,
  		&buffer[ThisTask->domain.local_start[X] - 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 0,
  		ThisTask->comm2d, &request[0]);
@@ -475,6 +487,22 @@ void Jacobi_Communication(MyData **const restrict Phi,
  		&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]   + 1], 1,             column,         ThisTask->nbrright,  3,
  		ThisTask->comm2d, &request[3]);

#else
  
  MPI_Request request[8];

  MPI_Irecv(&buffer[ThisTask->domain.local_start[X] - 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 0, ThisTask->comm2d, &request[0]);
  MPI_Irecv(&buffer[ThisTask->domain.local_end[X]   + 1][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop   , 1, ThisTask->comm2d, &request[1]);
  MPI_Irecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y] - 1], 1,             column,         ThisTask->nbrleft,   2, ThisTask->comm2d, &request[2]);
  MPI_Irecv(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]   + 1], 1,             column,         ThisTask->nbrright,  3, ThisTask->comm2d, &request[3]);
  
  MPI_Isend(&buffer[ThisTask->domain.local_end[X]      ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrtop,    0, ThisTask->comm2d, &request[4]);
  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y]    ], data_row_size, MPI_MyDatatype, ThisTask->nbrbottom, 1, ThisTask->comm2d, &request[5]);
  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_end[Y]      ], 1,             column,         ThisTask->nbrright,  2, ThisTask->comm2d, &request[6]);
  MPI_Isend(&buffer[ThisTask->domain.local_start[X]    ][ThisTask->domain.local_start[Y]    ], 1,             column,         ThisTask->nbrleft,   3, ThisTask->comm2d, &request[7]);

#endif

  /**************************************** computation ****************************************/
  /* perform the computation with the local data, (i.e. ghost cells are not required) */
  /* so overlapping computation and communication */
@@ -489,8 +517,12 @@ void Jacobi_Communication(MyData **const restrict Phi,

  /*********************************************************************************************/

#if MPI_VERSION > 4  
  /* wait the data on the boundaries */
  MPI_Waitall(4, request, MPI_STATUSES_IGNORE);
#else
  MPI_Waitall(8, request, MPI_STATUSES_IGNORE);
#endif
  
  /*  nbrbottom */
  JacobiAlgorithm(Phi, Phi0, delta,