Timestamp: 2019-11-20T22:48:28+01:00
Files: 1 edited
Legend:
- Unmodified (no prefix)
- Added (prefixed with +)
- Removed (prefixed with -; none in this changeset)
NEMO/branches/2019/dev_r11470_HPC_12_mpi3/src/OCE/LBC/lib_mpp.F90
r11799 → r11940

   !!            4.0  !  2017  (G. Madec)  automatique allocation of array argument (use any 3rd dimension)
   !!             -   !  2017  (G. Madec)  create generic.h90 files to generate all lbc and north fold routines
+  !!             -   !  2019  (S. Mocavero, I. Epicoco - CMCC)  add MPI3 neighbourhood collectives
   !!----------------------------------------------------------------------
…
   !!   mpp_ini_north     : initialisation of north fold
   !!   mpp_lbc_north_icb : alternative to mpp_nfd for extra outer halo with icebergs
+  !!   mpp_ini_nc        : initialisation of MPI3 neighbourhood collectives communicator
   !!----------------------------------------------------------------------
   USE dom_oce        ! ocean space and time domain
…
   PUBLIC   mppscatter, mppgather
   PUBLIC   mpp_ini_znl
+  PUBLIC   mpp_ini_nc
   PUBLIC   mppsend, mpprecv   ! needed by TAM and ICB routines
   PUBLIC   mpp_report
…
   INTEGER, PUBLIC ::   north_root      !: number (in the comm_opa) of proc 0 in the northern comm
   INTEGER, PUBLIC, DIMENSION(:), ALLOCATABLE, SAVE ::   nrank_north   !: dimension ndim_rank_north
+
+  ! variables used for MPI3 neighbourhood collectives
+  INTEGER, PUBLIC ::   mpi_nc_com      ! MPI3 neighbourhood collectives communicator
+  INTEGER, PUBLIC, DIMENSION(:), ALLOCATABLE ::   nranks

   ! Communications summary report
…

   END SUBROUTINE mpp_ini_znl
+
+  SUBROUTINE mpp_ini_nc
+     !!----------------------------------------------------------------------
+     !!               ***  routine mpp_ini_nc  ***
+     !!
+     !! ** Purpose :   Initialize special communicator for MPI3 neighbourhood
+     !!                collectives
+     !!
+     !! ** Method  : - Create a cartesian communicator starting from the
+     !!                processes distribution along i and j directions
+     !!
+     !! ** output
+     !!         mpi_nc_com = MPI3 neighbourhood collectives communicator
+     !!
+     !!----------------------------------------------------------------------
+     INTEGER, DIMENSION(2) ::   ipdims
+     LOGICAL, PARAMETER    ::   ireord = .FALSE.
+     LOGICAL               ::   nperiod_nc(2) = .FALSE.
+     INTEGER               ::   mpi_nc_com_re, igroup, iprc, ingroup, iii, jjj
+     INTEGER               ::   ierr
+
+#if defined key_mpp_mpi
+     ipdims(1) = jpni
+     ipdims(2) = jpnj
+     nperiod_nc(1) = .TRUE.
+
+     iprc = jpni*jpnj
+
+     ! Create a group from mpi_comm_oce
+     CALL MPI_Comm_group(mpi_comm_oce, igroup, ierr)
+
+     ! Create a group to reorder MPI rank
+     CALL MPI_Group_incl(igroup, iprc, nranks, ingroup, ierr)
+
+     ! Create reordered communicator
+     CALL MPI_Comm_create(mpi_comm_oce, ingroup, mpi_nc_com_re, ierr)
+
+     ! Create the cartesian communicator
+     CALL MPI_Cart_create(mpi_nc_com_re, 2, ipdims, nperiod_nc, ireord, mpi_nc_com, ierr)
+     DEALLOCATE(nranks)
+#endif
+  END SUBROUTINE mpp_ini_nc
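For readers new to MPI3 neighbourhood collectives, below is a minimal, standalone sketch of how a cartesian communicator like the mpi_nc_com built above can be used. This is not part of the changeset: mpp_ini_nc only creates the communicator, and the halo exchanges that call the collectives live elsewhere in the branch. The program and variable names (nc_halo_demo, zsend, zrecv, icart) are hypothetical; only the MPI-3 calls themselves (MPI_Cart_create, MPI_Neighbor_alltoall) are standard.

! Minimal sketch, not part of the changeset: one neighbourhood collective
! replaces four point-to-point send/recv pairs on a 2-D process grid.
PROGRAM nc_halo_demo
   USE mpi
   IMPLICIT NONE
   INTEGER               :: ierr, isize, irank, icart
   INTEGER, DIMENSION(2) :: idims
   LOGICAL, DIMENSION(2) :: lperiod
   REAL(8), DIMENSION(4) :: zsend, zrecv   ! one slot per neighbour: i-1, i+1, j-1, j+1

   CALL MPI_Init(ierr)
   CALL MPI_Comm_size(MPI_COMM_WORLD, isize, ierr)

   idims(:) = 0                             ! let MPI factorise the process grid
   CALL MPI_Dims_create(isize, 2, idims, ierr)
   lperiod(:) = (/ .TRUE., .FALSE. /)       ! periodic in i only, as in mpp_ini_nc
   CALL MPI_Cart_create(MPI_COMM_WORLD, 2, idims, lperiod, .FALSE., icart, ierr)

   CALL MPI_Comm_rank(icart, irank, ierr)
   zsend(:) = REAL(irank, 8)

   ! One call exchanges data with all four cartesian neighbours at once;
   ! missing neighbours on non-periodic edges are MPI_PROC_NULL and are skipped.
   CALL MPI_Neighbor_alltoall(zsend, 1, MPI_DOUBLE_PRECISION,   &
      &                       zrecv, 1, MPI_DOUBLE_PRECISION, icart, ierr)

   CALL MPI_Finalize(ierr)
END PROGRAM nc_halo_demo

A note on the design choice visible in mpp_ini_nc: the ranks are first reordered through MPI_Group_incl with the nranks mapping, and MPI_Cart_create is then called with ireord = .FALSE., presumably so that the cartesian grid follows NEMO's own i/j process layout rather than letting MPI choose one. The neighbour ordering used by MPI_Neighbor_alltoall on a 2-D cartesian communicator (i-1, i+1, j-1, j+1) is then fixed by that layout.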