#include "ep_lib.hpp"
#include <mpi.h>
#include "ep_declaration.hpp"
#include <cassert>   // for assert()

using namespace std;

namespace ep_lib
{
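  // Builds an EP intercommunicator from two local EP communicators whose leaders can
  // talk over peer_comm. Depending on how the leaders are placed, the call dispatches
  // to one of three kernels: MPI_Intercomm_create_unique_leader,
  // MPI_Intercomm_create_from_world, or MPI_Intercomm_create_kernel.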
  int MPI_Intercomm_create(MPI_Comm local_comm, int local_leader, MPI_Comm peer_comm, int remote_leader, int tag, MPI_Comm *newintercomm)
  {
    assert(local_comm.is_ep);

    int ep_rank, ep_rank_loc, mpi_rank;
    int ep_size, num_ep, mpi_size;

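    // size_rank_info[0]: rank/size over all endpoints, [1]: rank/count of the
    // endpoints hosted locally, [2]: rank/size in the underlying MPI communicator.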
    ep_rank     = local_comm.ep_comm_ptr->size_rank_info[0].first;
    ep_rank_loc = local_comm.ep_comm_ptr->size_rank_info[1].first;
    mpi_rank    = local_comm.ep_comm_ptr->size_rank_info[2].first;
    ep_size     = local_comm.ep_comm_ptr->size_rank_info[0].second;
    num_ep      = local_comm.ep_comm_ptr->size_rank_info[1].second;
    mpi_size    = local_comm.ep_comm_ptr->size_rank_info[2].second;

    MPI_Barrier(local_comm);

    int leader_ranks[6]; //! 0: rank in world, 1: mpi_size, 2: rank in peer_comm.
                         //! 3, 4, 5: the same three values received from the remote leader.

    bool is_decider = false;

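    // The two leaders exchange their triplets (rank in world, MPI size, rank in
    // peer_comm) over peer_comm; the result is then broadcast within local_comm.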
    if(ep_rank == local_leader)
    {
      MPI_Comm_rank(MPI_COMM_WORLD, &leader_ranks[0]);
      leader_ranks[1] = mpi_size;
      MPI_Comm_rank(peer_comm, &leader_ranks[2]);

      MPI_Request request[2];
      MPI_Status status[2];

      MPI_Isend(&leader_ranks[0], 3, static_cast< ::MPI_Datatype>(MPI_INT), remote_leader, tag, peer_comm, &request[0]);
      MPI_Irecv(&leader_ranks[3], 3, static_cast< ::MPI_Datatype>(MPI_INT), remote_leader, tag, peer_comm, &request[1]);

      MPI_Waitall(2, request, status);
    }

    MPI_Bcast(leader_ranks, 6, static_cast< ::MPI_Datatype>(MPI_INT), local_leader, local_comm);

    MPI_Barrier(local_comm);

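    // If both leaders report the same rank in MPI_COMM_WORLD, the generic kernel is
    // not used; the call is routed to the unique-leader or world-based variant instead.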
    if(leader_ranks[0] == leader_ranks[3])
    {
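      // Both sides span a single MPI process each (both MPI sizes are 1):
      // use the unique-leader variant.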
      if(leader_ranks[1] * leader_ranks[4] == 1)
      {
        if(ep_rank == local_leader) Debug("calling MPI_Intercomm_create_unique_leader\n");
        local_comm.ep_comm_ptr->comm_label = -99;

        return MPI_Intercomm_create_unique_leader(local_comm, local_leader, peer_comm, remote_leader, tag, newintercomm);
      }
      else // leader_ranks[1] * leader_ranks[4] != 1
      {
        // At least one side spans several MPI processes. One side becomes the
        // "decider": it selects a new local leader (the first endpoint whose rank_map
        // entry differs from the current leader's) before the call is routed to the
        // world-based variant.
        int new_local_leader;

        if(leader_ranks[2] < leader_ranks[5])
        {
          if(leader_ranks[1] > 1)
          {
            is_decider = true;
            int target = local_comm.rank_map->at(local_leader).second;
            for(int i=0; i<ep_size; i++)
            {
              if(local_comm.rank_map->at(i).second != target && local_comm.rank_map->at(i).first == 0)
              {
                new_local_leader = i;
                break;
              }
            }
          }
          else
          {
            new_local_leader = local_leader;
          }
        }
        else
        {
          if(leader_ranks[4] == 1)
          {
            is_decider = true;
            int target = local_comm.rank_map->at(local_leader).second;
            for(int i=0; i<ep_size; i++)
            {
              if(local_comm.rank_map->at(i).second != target && local_comm.rank_map->at(i).first == 0)
              {
                new_local_leader = i;
                break;
              }
            }
          }
          else
          {
            new_local_leader = local_leader;
          }
        }

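        // The decider draws a fresh world tag and sends it to the other side, so both
        // sides pass the same tag to MPI_Intercomm_create_from_world below.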
        int new_tag_in_world;
        int leader_in_world[2];

        if(is_decider)
        {
          if(ep_rank == new_local_leader)
          {
            new_tag_in_world = TAG++;
          }
          MPI_Bcast(&new_tag_in_world, 1, static_cast< ::MPI_Datatype>(MPI_INT), new_local_leader, local_comm);
          if(ep_rank == local_leader) MPI_Send(&new_tag_in_world, 1, static_cast< ::MPI_Datatype>(MPI_INT), remote_leader, tag, peer_comm);
        }
        else
        {
          if(ep_rank == local_leader)
          {
            MPI_Status status;
            MPI_Recv(&new_tag_in_world, 1, static_cast< ::MPI_Datatype>(MPI_INT), remote_leader, tag, peer_comm, &status);
          }
          MPI_Bcast(&new_tag_in_world, 1, static_cast< ::MPI_Datatype>(MPI_INT), new_local_leader, local_comm);
        }

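        // Each side determines the MPI_COMM_WORLD rank of its (possibly new) leader,
        // broadcasts it locally, and the original leaders exchange it over peer_comm.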
        if(ep_rank == new_local_leader)
        {
          ::MPI_Comm_rank(static_cast< ::MPI_Comm>(MPI_COMM_WORLD.mpi_comm), &leader_in_world[0]);
        }

        MPI_Bcast(&leader_in_world[0], 1, static_cast< ::MPI_Datatype>(MPI_INT), new_local_leader, local_comm);

        if(ep_rank == local_leader)
        {
          MPI_Request request[2];
          MPI_Status status[2];

          MPI_Isend(&leader_in_world[0], 1, static_cast< ::MPI_Datatype>(MPI_INT), remote_leader, tag, peer_comm, &request[0]);
          MPI_Irecv(&leader_in_world[1], 1, static_cast< ::MPI_Datatype>(MPI_INT), remote_leader, tag, peer_comm, &request[1]);

          MPI_Waitall(2, request, status);
        }

        MPI_Bcast(&leader_in_world[1], 1, MPI_INT, local_leader, local_comm);

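        // Remember the user tag as the communicator label, then build the
        // intercommunicator through MPI_COMM_WORLD with the new leaders and tag.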
        local_comm.ep_comm_ptr->comm_label = tag;

        if(ep_rank == local_leader) Debug("calling MPI_Intercomm_create_from_world\n");

        return MPI_Intercomm_create_from_world(local_comm, new_local_leader, static_cast< ::MPI_Comm>(MPI_COMM_WORLD.mpi_comm), leader_in_world[1], new_tag_in_world, newintercomm);
      }
    }

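    // Default case (the leaders report different world ranks): use the generic kernel.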
    if(ep_rank == local_leader) Debug("calling MPI_Intercomm_create_kernel\n");

    return MPI_Intercomm_create_kernel(local_comm, local_leader, peer_comm, remote_leader, tag, newintercomm);
  }

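  // Sets *flag to true if comm is an intercommunicator, handling both EP
  // communicators and plain MPI communicators.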
  int MPI_Comm_test_inter(MPI_Comm comm, int *flag)
  {
    *flag = false;
    if(comm.is_ep)
    {
      *flag = comm.is_intercomm;
      return 0;
    }
    else if(comm.mpi_comm != static_cast< ::MPI_Comm>(MPI_COMM_NULL.mpi_comm))
    {
      ::MPI_Comm mpi_comm = static_cast< ::MPI_Comm>(comm.mpi_comm);

      ::MPI_Comm_test_inter(mpi_comm, flag);
      return 0;
    }
    return 0;
  }

}