New URL for NEMO forge!   http://forge.nemo-ocean.eu

Since March 2022 along with NEMO 4.2 release, the code development moved to a self-hosted GitLab.
This forge is now archived and remains online for historical reference.
Changeset 14363 for NEMO/branches/2021/dev_r14312_MPI_Interface/src/OCE/LBC/lib_mpp.F90 – NEMO

Ignore:
Timestamp:
2021-02-01T08:34:52+01:00 (3 years ago)
Author:
smasson
Message:

dev_r14312_MPI_Interface: suppress communications involving only land points, #2598

File:
1 edited

Legend:

Unmodified
Added
Removed
  • NEMO/branches/2021/dev_r14312_MPI_Interface/src/OCE/LBC/lib_mpp.F90

    r14349 r14363  
    139139 
    140140   ! Neighbourgs informations 
    141    INTEGER, DIMENSION(8), PUBLIC ::   mpinei     !: 8-neighbourg MPI indexes (starting at 0, -1 if no neighbourg) 
     141   INTEGER,    PARAMETER, PUBLIC ::   n_hlsmax = 3 
     142   INTEGER, DIMENSION(         8), PUBLIC ::   mpinei      !: 8-neighbourg MPI indexes (starting at 0, -1 if no neighbourg) 
     143   INTEGER, DIMENSION(n_hlsmax,8), PUBLIC ::   mpiSnei     !: 8-neighbourg Send MPI indexes (starting at 0, -1 if no neighbourg) 
     144   INTEGER, DIMENSION(n_hlsmax,8), PUBLIC ::   mpiRnei     !: 8-neighbourg Recv MPI indexes (starting at 0, -1 if no neighbourg) 
    142145   INTEGER,    PARAMETER, PUBLIC ::   jpwe = 1   !: WEst 
    143146   INTEGER,    PARAMETER, PUBLIC ::   jpea = 2   !: EAst 
     
    160163 
    161164   ! variables used for MPI3 neighbourhood collectives 
    162    INTEGER, PUBLIC ::   mpi_nc_com4       ! MPI3 neighbourhood collectives communicator 
    163    INTEGER, PUBLIC ::   mpi_nc_com8       ! MPI3 neighbourhood collectives communicator (with diagionals) 
     165   INTEGER, DIMENSION(n_hlsmax), PUBLIC ::   mpi_nc_com4       ! MPI3 neighbourhood collectives communicator 
     166   INTEGER, DIMENSION(n_hlsmax), PUBLIC ::   mpi_nc_com8       ! MPI3 neighbourhood collectives communicator (with diagionals) 
    164167 
    165168   ! North fold condition in mpp_mpi with jpni > 1 (PUBLIC for TAM) 
     
    11001103 
    11011104    
    1102    SUBROUTINE mpp_ini_nc 
     1105   SUBROUTINE mpp_ini_nc( khls ) 
    11031106      !!---------------------------------------------------------------------- 
    11041107      !!               ***  routine mpp_ini_nc  *** 
     
    11141117      !!         mpi_nc_com8 = MPI3 neighbourhood collectives communicator (with diagonals) 
    11151118      !!---------------------------------------------------------------------- 
    1116       INTEGER, DIMENSION(:), ALLOCATABLE :: inei4, inei8 
    1117       INTEGER :: icnt4, icnt8 
    1118       INTEGER :: ierr 
    1119       LOGICAL, PARAMETER :: ireord = .FALSE. 
     1119      INTEGER,             INTENT(in   ) ::   khls        ! halo size, default = nn_hls 
     1120      ! 
     1121      INTEGER, DIMENSION(:), ALLOCATABLE :: iSnei4, iRnei4, iSnei8, iRnei8 
     1122      INTEGER                            :: iScnt4, iRcnt4, iScnt8, iRcnt8 
     1123      INTEGER                            :: ierr 
     1124      LOGICAL, PARAMETER                 :: ireord = .FALSE. 
    11201125      !!---------------------------------------------------------------------- 
    11211126#if ! defined key_mpi_off && ! defined key_mpi2 
    11221127       
    1123       icnt4 = COUNT( mpinei(1:4) >= 0 ) 
    1124       icnt8 = COUNT( mpinei(1:8) >= 0 ) 
    1125  
    1126       ALLOCATE( inei4(icnt4), inei8(icnt8) )   ! ok if icnt4 or icnt8 = 0 
    1127  
    1128       inei4 = PACK( mpinei(1:4), mask = mpinei(1:4) >= 0 ) 
    1129       inei8 = PACK( mpinei(1:8), mask = mpinei(1:8) >= 0 ) 
    1130  
    1131       CALL MPI_Dist_graph_create_adjacent(mpi_comm_oce, icnt4, inei4, MPI_UNWEIGHTED,   & 
    1132          &                                              icnt4, inei4, MPI_UNWEIGHTED, MPI_INFO_NULL, ireord, mpi_nc_com4, ierr) 
    1133       CALL MPI_Dist_graph_create_adjacent(mpi_comm_oce, icnt8, inei8, MPI_UNWEIGHTED,   & 
    1134          &                                              icnt8, inei8, MPI_UNWEIGHTED, MPI_INFO_NULL, ireord, mpi_nc_com8, ierr) 
    1135  
    1136       DEALLOCATE (inei4, inei8) 
     1128      iScnt4 = COUNT( mpiSnei(khls,1:4) >= 0 ) 
     1129      iRcnt4 = COUNT( mpiRnei(khls,1:4) >= 0 ) 
     1130      iScnt8 = COUNT( mpiSnei(khls,1:8) >= 0 ) 
     1131      iRcnt8 = COUNT( mpiRnei(khls,1:8) >= 0 ) 
     1132 
     1133      ALLOCATE( iSnei4(iScnt4), iRnei4(iRcnt4), iSnei8(iScnt8), iRnei8(iRcnt8) )   ! ok if icnt4 or icnt8 = 0 
     1134 
     1135      iSnei4 = PACK( mpiSnei(khls,1:4), mask = mpiSnei(khls,1:4) >= 0 ) 
     1136      iRnei4 = PACK( mpiRnei(khls,1:4), mask = mpiRnei(khls,1:4) >= 0 ) 
     1137      iSnei8 = PACK( mpiSnei(khls,1:8), mask = mpiSnei(khls,1:8) >= 0 ) 
     1138      iRnei8 = PACK( mpiRnei(khls,1:8), mask = mpiRnei(khls,1:8) >= 0 ) 
     1139 
     1140      CALL MPI_Dist_graph_create_adjacent( mpi_comm_oce, iScnt4, iSnei4, MPI_UNWEIGHTED, iRcnt4, iRnei4, MPI_UNWEIGHTED,   & 
     1141         &                                 MPI_INFO_NULL, ireord, mpi_nc_com4(khls), ierr ) 
     1142      CALL MPI_Dist_graph_create_adjacent( mpi_comm_oce, iScnt8, iSnei8, MPI_UNWEIGHTED, iRcnt8, iRnei8, MPI_UNWEIGHTED,   & 
     1143         &                                 MPI_INFO_NULL, ireord, mpi_nc_com8(khls), ierr) 
     1144 
     1145      DEALLOCATE( iSnei4, iRnei4, iSnei8, iRnei8 ) 
    11371146#endif 
    11381147   END SUBROUTINE mpp_ini_nc 
Note: See TracChangeset for help on using the changeset viewer.