/* comm_create.c -- builds a communicator from the first q processes
 *     in a communicator containing p = q^2 processes.
 *
 * Input: none
 * Output: q -- the program tests correct creation of the new communicator
 *     by broadcasting the value 1 to its members -- all other processes
 *     keep the value 0 -- a global sum is then computed across all the
 *     processes.
 *
 * Note: Assumes that MPI_COMM_WORLD contains p = q^2 processes.
 *
 * This example can be found in "Parallel Programming with MPI" by
 * Peter Pacheco, Morgan Kaufmann Publishers.  See Chap 7, pp. 117 & ff
 * in PPMPI.
 */
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <mpi.h>

#define MAX_PROCS 100

int process_ranks[MAX_PROCS];

int main(int argc, char **argv) {
    int       p;                      /* number of processes in MPI_COMM_WORLD */
    float     p_real;
    int       q;                      /* q = sqrt(p)                           */
    int       my_rank;
    MPI_Group group_world;
    MPI_Group first_row_group;
    MPI_Comm  first_row_comm;
    int       proc;
    int       test;
    int       sum;
    int       my_rank_in_first_row;
    int       ierr;

    test = 0;

    ierr = MPI_Init(&argc, &argv);
    ierr = MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);
    ierr = MPI_Comm_size(MPI_COMM_WORLD, &p);

    p_real = p;
    q = (int) sqrt(p_real);
    if (q*q != p) {
        if (my_rank == 0)
            fprintf(stderr, "%d^2 != %d: program terminates\n", q, p);
        ierr = MPI_Finalize();
        exit(-1);
    }

    /* Make a list of the processes in the new communicator */
    for (proc = 0; proc < q; proc++)
        process_ranks[proc] = proc;

    /* Get the group underlying MPI_COMM_WORLD */
    ierr = MPI_Comm_group(MPI_COMM_WORLD, &group_world);

    /* Create the new group */
    ierr = MPI_Group_incl(group_world, q, process_ranks, &first_row_group);

    /* Create the new communicator */
    ierr = MPI_Comm_create(MPI_COMM_WORLD, first_row_group, &first_row_comm);

    /* Now check whether we can do collective ops in first_row_comm.
     * Processes outside the first row received MPI_COMM_NULL, so only
     * ranks 0..q-1 take part in the broadcast. */
    if (my_rank < q) {
        ierr = MPI_Comm_rank(first_row_comm, &my_rank_in_first_row);
        if (my_rank_in_first_row == 0)
            test = 1;
        ierr = MPI_Bcast(&test, 1, MPI_INT, 0, first_row_comm);
    }

    ierr = MPI_Reduce(&test, &sum, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD);

    if (my_rank == 0)
        fprintf(stdout, "q = %d, sum = %d\n", q, sum);

    ierr = MPI_Finalize();
    return 0;
}
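
/* Usage sketch (not part of the original PPMPI listing): one possible way
 * to build and run this example, assuming an MPI installation that provides
 * the usual mpicc / mpiexec wrappers.  The process count 4 is an arbitrary
 * choice that satisfies p = q^2 (here q = 2):
 *
 *     mpicc -o comm_create comm_create.c -lm
 *     mpiexec -n 4 ./comm_create
 *
 * If first_row_comm is created correctly, each of the q first-row processes
 * ends up with test = 1 and every other process keeps test = 0, so the
 * reduction on process 0 prints "q = 2, sum = 2" for this run.
 */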