Timestamp:
- 10/04/17 11:45:14 (7 years ago)
File (1 edited):
- XIOS/dev/branch_openmp/extern/src_ep_dev/ep_intercomm_kernel.cpp
Legend:
- Unmodified (no prefix)
- Added (+)
- Removed (-)
r1185 → r1287

  int MPI_Intercomm_create_kernel(MPI_Comm local_comm, int local_leader, MPI_Comm peer_comm, int remote_leader, int tag, MPI_Comm *newintercomm)
  {
-
    int ep_rank, ep_rank_loc, mpi_rank;
    int ep_size, num_ep, mpi_size;
…
-   ::MPI_Comm_rank( MPI_COMM_WORLD_STD, &rank_in_world);
+   ::MPI_Comm_rank(static_cast< ::MPI_Comm>(MPI_COMM_WORLD.mpi_comm), &rank_in_world);
    ::MPI_Comm_rank(static_cast< ::MPI_Comm>(local_comm.mpi_comm), &rank_in_local_parent);
…
    send_buf[2] = num_ep;

-   ::MPI_Allgather(send_buf.data(), 3, MPI_INT_STD, recv_buf.data(), 3, MPI_INT_STD, local_mpi_comm);
+   ::MPI_Allgather(send_buf.data(), 3, static_cast< ::MPI_Datatype> (MPI_INT), recv_buf.data(), 3, static_cast< ::MPI_Datatype> (MPI_INT), local_mpi_comm);

    for(int i=0; i<size_info[0]; i++)
…
    MPI_Status sta_send, sta_recv;

-   MPI_Isend(send_buf.data(), 3, MPI_INT_STD, remote_leader, tag, peer_comm, &req_send);
-   MPI_Irecv(recv_buf.data(), 3, MPI_INT_STD, remote_leader, tag, peer_comm, &req_recv);
+   MPI_Isend(send_buf.data(), 3, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag, peer_comm, &req_send);
+   MPI_Irecv(recv_buf.data(), 3, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag, peer_comm, &req_recv);
…
    }

+
+
    send_buf[0] = size_info[1];
    send_buf[1] = leader_info[0];
…
    send_buf[4] = rank_in_peer_mpi[1];

-   ::MPI_Bcast(send_buf.data(), 5, MPI_INT_STD, local_comm.rank_map->at(local_leader).second, local_mpi_comm);
+   ::MPI_Bcast(send_buf.data(), 5, static_cast< ::MPI_Datatype> (MPI_INT), local_comm.rank_map->at(local_leader).second, local_mpi_comm);

    size_info[1] = send_buf[0];
…
    std::copy ( ep_info[0].data(), ep_info[0].data() + size_info[0], send_buf.begin() + 2*size_info[0] );

-   MPI_Send(send_buf.data(), 3*size_info[0], MPI_INT_STD, remote_leader, tag+1, peer_comm);
-   MPI_Recv(recv_buf.data(), 3*size_info[1], MPI_INT_STD, remote_leader, tag+1, peer_comm, &status);
-   }
-
-   ::MPI_Bcast(recv_buf.data(), 3*size_info[1], MPI_INT_STD, local_comm.rank_map->at(local_leader).second, local_mpi_comm);
+   MPI_Send(send_buf.data(), 3*size_info[0], static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+1, peer_comm);
+   MPI_Recv(recv_buf.data(), 3*size_info[1], static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+1, peer_comm, &status);
+   }
+
+   ::MPI_Bcast(recv_buf.data(), 3*size_info[1], static_cast< ::MPI_Datatype> (MPI_INT), local_comm.rank_map->at(local_leader).second, local_mpi_comm);

    std::copy ( recv_buf.data(), recv_buf.data() + size_info[1], rank_info[2].begin() );
…
    size_info[2] = new_ep_info[0].size();
    MPI_Status status;
-   MPI_Send(&size_info[2], 1, MPI_INT_STD, remote_leader, tag+2, peer_comm);
-   MPI_Recv(&size_info[3], 1, MPI_INT_STD, remote_leader, tag+2, peer_comm, &status);
-   }
-
-   ::MPI_Bcast(&size_info[2], 2, MPI_INT_STD, local_comm.rank_map->at(local_leader).second, local_mpi_comm);
+   MPI_Send(&size_info[2], 1, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+2, peer_comm);
+   MPI_Recv(&size_info[3], 1, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+2, peer_comm, &status);
+   }
+
+   ::MPI_Bcast(&size_info[2], 2, static_cast< ::MPI_Datatype> (MPI_INT), local_comm.rank_map->at(local_leader).second, local_mpi_comm);

    new_rank_info[2].resize(size_info[3]);
…
    std::copy ( new_ep_info[0].data(), new_ep_info[0].data() + size_info[0], send_buf.begin() + 2*size_info[2] );

-   MPI_Send(send_buf.data(), 3*size_info[2], MPI_INT_STD, remote_leader, tag+3, peer_comm);
-   MPI_Recv(recv_buf.data(), 3*size_info[3], MPI_INT_STD, remote_leader, tag+3, peer_comm, &status);
-   }
-
-   ::MPI_Bcast(recv_buf.data(), 3*size_info[3], MPI_INT_STD, local_comm.rank_map->at(local_leader).second, local_mpi_comm);
+   MPI_Send(send_buf.data(), 3*size_info[2], static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+3, peer_comm);
+   MPI_Recv(recv_buf.data(), 3*size_info[3], static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+3, peer_comm, &status);
+   }
+
+   ::MPI_Bcast(recv_buf.data(), 3*size_info[3], static_cast< ::MPI_Datatype> (MPI_INT), local_comm.rank_map->at(local_leader).second, local_mpi_comm);

    std::copy ( recv_buf.data(), recv_buf.data() + size_info[3], new_rank_info[2].begin() );
…
    }

    if(is_proc_master)
…
    }

-   ::MPI_Bcast(&leader_info[2], 1, MPI_INT_STD, local_comm.rank_map->at(local_leader).second, local_mpi_comm);
-
-   if(new_comm != MPI_COMM_NULL_STD)
+   ::MPI_Bcast(&leader_info[2], 1, static_cast< ::MPI_Datatype> (MPI_INT), local_comm.rank_map->at(local_leader).second, local_mpi_comm);
+
+   if(new_comm != static_cast< ::MPI_Comm>(MPI_COMM_NULL.mpi_comm))
    {
…
    //printf("tag_list size = %lu\n", tag_list.size());
    }
-
-
-   }
-
-
-   MPI_Barrier_local(local_comm);
+   }

    vector<int> bcast_buf(8);
…
    }

-   MPI_Bcast(bcast_buf.data(), 8, MPI_INT_STD, local_leader, local_comm);
+   MPI_Bcast(bcast_buf.data(), 8, static_cast< ::MPI_Datatype> (MPI_INT), local_leader, local_comm);

    if(!is_local_leader)
…
    }

-   MPI_Bcast(bcast_buf.data(), size_info[2]+size_info[1]+size_info[0]+1, MPI_INT_STD, local_leader, local_comm);
+   MPI_Bcast(bcast_buf.data(), size_info[2]+size_info[1]+size_info[0]+1, static_cast< ::MPI_Datatype> (MPI_INT), local_leader, local_comm);

    if(!is_local_leader)
…
    int my_position = offset[rank_in_local_parent]+ep_rank_loc;
-
    MPI_Barrier_local(local_comm);
    #pragma omp flush
…
    if((*iter).first == make_pair(tag, min(leader_info[0], leader_info[1])))
    {
-     *newintercomm =
+     *newintercomm = iter->second[my_position];
      found = true;
      break;
…
    }

-   MPI_Barrier_local(local_comm);
+   MPI_Barrier(local_comm);
+
+   if(is_local_leader)
+   {
+     int local_flag = true;
+     int remote_flag = false;
+     MPI_Status mpi_status;
+
+     MPI_Send(&local_flag, 1, MPI_INT, remote_leader, tag, peer_comm);
+
+     MPI_Recv(&remote_flag, 1, MPI_INT, remote_leader, tag, peer_comm, &mpi_status);
+   }
+
+
+   MPI_Barrier(local_comm);
+
    if(is_proc_master)
    {
…
    intercomm_mpi_size = newintercomm->ep_comm_ptr->size_rank_info[2].second;

-   MPI_Bcast(&remote_ep_size, 1, MPI_INT_STD, local_leader, local_comm);
+   MPI_Bcast(&remote_ep_size, 1, static_cast< ::MPI_Datatype> (MPI_INT), local_leader, local_comm);

    int my_rank_map_elem[2];
…
    (*newintercomm).ep_comm_ptr->intercomm->local_rank_map->resize(local_ep_size);

-   MPI_Allgather(my_rank_map_elem, 2, MPI_INT_STD, (*newintercomm).ep_comm_ptr->intercomm->local_rank_map->data(), 2, MPI_INT_STD, local_comm);
+   MPI_Allgather(my_rank_map_elem, 2, static_cast< ::MPI_Datatype> (MPI_INT),
+                 (*newintercomm).ep_comm_ptr->intercomm->local_rank_map->data(), 2, static_cast< ::MPI_Datatype> (MPI_INT), local_comm);

    (*newintercomm).ep_comm_ptr->intercomm->remote_rank_map = new RANK_MAP;
…
    {
      MPI_Status status;
-     MPI_Send((*newintercomm).ep_comm_ptr->intercomm->local_rank_map->data(), 2*local_ep_size, MPI_INT_STD, remote_leader, tag+4, peer_comm);
-     MPI_Recv((*newintercomm).ep_comm_ptr->intercomm->remote_rank_map->data(), 2*remote_ep_size, MPI_INT_STD, remote_leader, tag+4, peer_comm, &status);
-
-     MPI_Send(&local_intercomm_size, 1, MPI_INT_STD, remote_leader, tag+5, peer_comm);
-     MPI_Recv(&remote_intercomm_size, 1, MPI_INT_STD, remote_leader, tag+5, peer_comm, &status);
+     MPI_Send((*newintercomm).ep_comm_ptr->intercomm->local_rank_map->data(), 2*local_ep_size, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+4, peer_comm);
+     MPI_Recv((*newintercomm).ep_comm_ptr->intercomm->remote_rank_map->data(), 2*remote_ep_size, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+4, peer_comm, &status);
+
+     MPI_Send(&local_intercomm_size, 1, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+5, peer_comm);
+     MPI_Recv(&remote_intercomm_size, 1, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+5, peer_comm, &status);

      new_bcast_root_0 = intercomm_ep_rank;
    }

-   MPI_Allreduce(&new_bcast_root_0, &new_bcast_root, 1, MPI_INT_STD, MPI_SUM_STD, *newintercomm);
-
-
-   MPI_Bcast((*newintercomm).ep_comm_ptr->intercomm->remote_rank_map->data(), 2*remote_ep_size, MPI_INT_STD, local_leader, local_comm);
-   MPI_Bcast(&remote_intercomm_size, 1, MPI_INT_STD, new_bcast_root, *newintercomm);
+   MPI_Allreduce(&new_bcast_root_0, &new_bcast_root, 1, static_cast< ::MPI_Datatype> (MPI_INT), static_cast< ::MPI_Op>(MPI_SUM), *newintercomm);
+
+
+   MPI_Bcast((*newintercomm).ep_comm_ptr->intercomm->remote_rank_map->data(), 2*remote_ep_size, static_cast< ::MPI_Datatype> (MPI_INT), local_leader, local_comm);
+   MPI_Bcast(&remote_intercomm_size, 1, static_cast< ::MPI_Datatype> (MPI_INT), new_bcast_root, *newintercomm);
…
    {
      MPI_Status status;
-     MPI_Send((*newintercomm).rank_map->data(), 2*local_intercomm_size, MPI_INT_STD, remote_leader, tag+6, peer_comm);
-     MPI_Recv((*newintercomm).ep_comm_ptr->intercomm->intercomm_rank_map->data(), 2*remote_intercomm_size, MPI_INT_STD, remote_leader, tag+6, peer_comm, &status);
-   }
-
-   MPI_Bcast((*newintercomm).ep_comm_ptr->intercomm->intercomm_rank_map->data(), 2*remote_intercomm_size, MPI_INT_STD, new_bcast_root, *newintercomm);
+     MPI_Send((*newintercomm).rank_map->data(), 2*local_intercomm_size, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+6, peer_comm);
+     MPI_Recv((*newintercomm).ep_comm_ptr->intercomm->intercomm_rank_map->data(), 2*remote_intercomm_size, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag+6, peer_comm, &status);
+   }
+
+   MPI_Bcast((*newintercomm).ep_comm_ptr->intercomm->intercomm_rank_map->data(), 2*remote_intercomm_size, static_cast< ::MPI_Datatype> (MPI_INT), new_bcast_root, *newintercomm);

    (*newintercomm).ep_comm_ptr->intercomm->local_comm = &(local_comm.ep_comm_ptr->comm_list[ep_rank_loc]);
…
    int rank_in_peer_mpi[2];

-   ::MPI_Comm_rank( MPI_COMM_WORLD_STD, &rank_in_world);
+   ::MPI_Comm_rank(static_cast< ::MPI_Comm >(MPI_COMM_WORLD.mpi_comm), &rank_in_world);
…
    MPI_Request req_s, req_r;

-   MPI_Isend(send_buf.data(), 2, MPI_INT_STD, remote_leader, tag, peer_comm, &req_s);
-   MPI_Irecv(recv_buf.data(), 2, MPI_INT_STD, remote_leader, tag, peer_comm, &req_r);
+   MPI_Isend(send_buf.data(), 2, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag, peer_comm, &req_s);
+   MPI_Irecv(recv_buf.data(), 2, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag, peer_comm, &req_r);
…
    }

-   MPI_Bcast(recv_buf.data(), 3, MPI_INT_STD, local_leader, local_comm);
+   MPI_Bcast(recv_buf.data(), 3, static_cast< ::MPI_Datatype> (MPI_INT), local_leader, local_comm);

    remote_num_ep = recv_buf[0];
…
    MPI_Request req_s;
    MPI_Status sta_s;
-   MPI_Isend(tag_label, 2, MPI_INT_STD, remote_leader, tag, peer_comm, &req_s);
+   MPI_Isend(tag_label, 2, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag, peer_comm, &req_s);

    MPI_Wait(&req_s, &sta_s);
…
    MPI_Status status;
    MPI_Request req_r;
-   MPI_Irecv(tag_label, 2, MPI_INT_STD, remote_leader, tag, peer_comm, &req_r);
+   MPI_Irecv(tag_label, 2, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag, peer_comm, &req_r);
    MPI_Wait(&req_r, &status);
    }
    }

-   MPI_Bcast(tag_label, 2, MPI_INT_STD, local_leader, local_comm);
+   MPI_Bcast(tag_label, 2, static_cast< ::MPI_Datatype> (MPI_INT), local_leader, local_comm);
…
    local_rank_map_ele[1] = (*newintercomm).ep_comm_ptr->comm_label;

-   MPI_Allgather(local_rank_map_ele, 2, MPI_INT_STD, (*newintercomm).ep_comm_ptr->intercomm->local_rank_map->data(), 2, MPI_INT_STD, local_comm);
+   MPI_Allgather(local_rank_map_ele, 2, static_cast< ::MPI_Datatype> (MPI_INT),
+                 (*newintercomm).ep_comm_ptr->intercomm->local_rank_map->data(), 2, static_cast< ::MPI_Datatype> (MPI_INT), local_comm);

    if(ep_rank == local_leader)
…
    {
      MPI_Request req_s, req_r;

-     MPI_Isend((*newintercomm).ep_comm_ptr->intercomm->local_rank_map->data(), 2*local_num_ep, MPI_INT_STD, remote_leader, tag, peer_comm, &req_s);
-     MPI_Irecv((*newintercomm).ep_comm_ptr->intercomm->remote_rank_map->data(), 2*remote_num_ep, MPI_INT_STD, remote_leader, tag, peer_comm, &req_r);
+     MPI_Isend((*newintercomm).ep_comm_ptr->intercomm->local_rank_map->data(), 2*local_num_ep, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag, peer_comm, &req_s);
+     MPI_Irecv((*newintercomm).ep_comm_ptr->intercomm->remote_rank_map->data(), 2*remote_num_ep, static_cast< ::MPI_Datatype> (MPI_INT), remote_leader, tag, peer_comm, &req_r);
…
    }

-   MPI_Bcast((*newintercomm).ep_comm_ptr->intercomm->remote_rank_map->data(), 2*remote_num_ep, MPI_INT_STD, local_leader, local_comm);
+   MPI_Bcast((*newintercomm).ep_comm_ptr->intercomm->remote_rank_map->data(), 2*remote_num_ep, static_cast< ::MPI_Datatype> (MPI_INT), local_leader, local_comm);
    (*newintercomm).ep_comm_ptr->intercomm->local_comm = &(local_comm.ep_comm_ptr->comm_list[ep_rank_loc]);
    (*newintercomm).ep_comm_ptr->intercomm->intercomm_tag = tag;
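The recurring change in this revision is mechanical: wherever r1185 passed a *_STD convenience macro (MPI_INT_STD, MPI_COMM_WORLD_STD, MPI_COMM_NULL_STD, MPI_SUM_STD) straight to the underlying MPI library, r1287 now casts the endpoint (EP) wrapper's handle or the predefined constant to the native type explicitly, e.g. static_cast< ::MPI_Datatype>(MPI_INT) or static_cast< ::MPI_Comm>(MPI_COMM_WORLD.mpi_comm). The following is a minimal sketch of that call pattern only, not the XIOS EP implementation; ep_comm_sketch and ep_comm_rank_sketch are hypothetical names, and for portability the wrapped member is declared with the native MPI_Comm type, so the static_cast simply marks where the conversion happens without assuming how ep_lib actually stores the handle.

#include <mpi.h>
#include <cstdio>

// Hypothetical stand-in for the EP wrapper's communicator type; the real
// ep_lib::MPI_Comm in XIOS has more members and a different definition.
struct ep_comm_sketch
{
  MPI_Comm mpi_comm;   // underlying library communicator handle
};

// Rank query routed through the wrapper, mirroring the call shape used in the
// changeset:  ::MPI_Comm_rank(static_cast< ::MPI_Comm>(comm.mpi_comm), &rank);
static int ep_comm_rank_sketch(const ep_comm_sketch& comm, int* rank)
{
  return ::MPI_Comm_rank(static_cast< ::MPI_Comm>(comm.mpi_comm), rank);
}

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);

  ep_comm_sketch world;
  world.mpi_comm = MPI_COMM_WORLD;   // store the native handle inside the wrapper

  int rank = -1;
  ep_comm_rank_sketch(world, &rank);
  std::printf("endpoint sketch: rank %d\n", rank);

  MPI_Finalize();
  return 0;
}

Routing every call into the library through an explicit static_cast keeps the wrapper's own communicator and datatype types from being silently mixed with the native ones, which appears to be the motivation for dropping the *_STD macros in this changeset.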