New URL for NEMO forge!   http://forge.nemo-ocean.eu

Since March 2022 along with NEMO 4.2 release, the code development moved to a self-hosted GitLab.
This forge is now archived and remains online for historical reference.
Changeset 13899 for NEMO/branches/2020/tickets_icb_1900/src/OCE/LBC/lib_mpp.F90 – NEMO

Ignore:
Timestamp:
2020-11-27T17:26:33+01:00 (3 years ago)
Author:
mathiot
Message:

ticket #1900: update branch to trunk and add ICB test case

Location:
NEMO/branches/2020/tickets_icb_1900
Files:
2 edited

Legend:

Unmodified
Added
Removed
  • NEMO/branches/2020/tickets_icb_1900

    • Property svn:externals
      •  

        old new  
        22^/utils/build/makenemo@HEAD   makenemo 
        33^/utils/build/mk@HEAD         mk 
        4 ^/utils/tools/@HEAD           tools 
         4^/utils/tools@HEAD            tools 
        55^/vendors/AGRIF/dev_r12970_AGRIF_CMEMS      ext/AGRIF 
        66^/vendors/FCM@HEAD            ext/FCM 
         
        88 
        99# SETTE 
        10 ^/utils/CI/sette@12931        sette 
         10^/utils/CI/sette@13559        sette 
  • NEMO/branches/2020/tickets_icb_1900/src/OCE/LBC/lib_mpp.F90

    r13226 r13899  
    7373   PUBLIC   tic_tac 
    7474#if ! defined key_mpp_mpi 
     75   PUBLIC MPI_wait 
    7576   PUBLIC MPI_Wtime 
    7677#endif 
     
    115116#else    
    116117   INTEGER, PUBLIC, PARAMETER ::   MPI_STATUS_SIZE = 1 
     118   INTEGER, PUBLIC, PARAMETER ::   MPI_REAL = 4 
    117119   INTEGER, PUBLIC, PARAMETER ::   MPI_DOUBLE_PRECISION = 8 
    118120   LOGICAL, PUBLIC, PARAMETER ::   lk_mpp = .FALSE.    !: mpp flag 
     
    509511            ALLOCATE(todelay(idvar)%y1d(isz)) 
    510512            todelay(idvar)%y1d(:) = CMPLX(todelay(idvar)%z1d(:), 0., wp)   ! create %y1d, complex variable needed by mpi_sumdd 
     513            ndelayid(idvar) = MPI_REQUEST_NULL                             ! initialised request to a valid value 
    511514         END IF 
    512515      ENDIF 
     
    516519         ALLOCATE(todelay(idvar)%z1d(isz), todelay(idvar)%y1d(isz))   ! allocate also %z1d as used for the restart 
    517520         CALL mpi_allreduce( y_in(:), todelay(idvar)%y1d(:), isz, MPI_DOUBLE_COMPLEX, mpi_sumdd, ilocalcomm, ierr )   ! get %y1d 
    518          todelay(idvar)%z1d(:) = REAL(todelay(idvar)%y1d(:), wp)      ! define %z1d from %y1d 
    519       ENDIF 
    520  
    521       IF( ndelayid(idvar) > 0 )   CALL mpp_delay_rcv( idvar )         ! make sure %z1d is received 
     521         ndelayid(idvar) = MPI_REQUEST_NULL 
     522      ENDIF 
     523 
     524      CALL mpp_delay_rcv( idvar )         ! make sure %z1d is received 
    522525 
    523526      ! send back pout from todelay(idvar)%z1d defined at previous call 
     
    528531      IF( ln_timing ) CALL tic_tac( .TRUE., ld_global = .TRUE.) 
    529532      CALL  mpi_allreduce( y_in(:), todelay(idvar)%y1d(:), isz, MPI_DOUBLE_COMPLEX, mpi_sumdd, ilocalcomm, ierr ) 
    530       ndelayid(idvar) = 1 
     533      ndelayid(idvar) = MPI_REQUEST_NULL 
    531534      IF( ln_timing ) CALL tic_tac(.FALSE., ld_global = .TRUE.) 
    532535# else 
     
    589592            DEALLOCATE(todelay(idvar)%z1d) 
    590593            ndelayid(idvar) = -1                                      ! do as if we had no restart 
     594         ELSE 
     595            ndelayid(idvar) = MPI_REQUEST_NULL 
    591596         END IF 
    592597      ENDIF 
     
    596601         ALLOCATE(todelay(idvar)%z1d(isz)) 
    597602         CALL mpi_allreduce( p_in(:), todelay(idvar)%z1d(:), isz, MPI_DOUBLE_PRECISION, mpi_max, ilocalcomm, ierr )   ! get %z1d 
    598       ENDIF 
    599  
    600       IF( ndelayid(idvar) > 0 )   CALL mpp_delay_rcv( idvar )         ! make sure %z1d is received 
     603         ndelayid(idvar) = MPI_REQUEST_NULL 
     604      ENDIF 
     605 
     606      CALL mpp_delay_rcv( idvar )         ! make sure %z1d is received 
    601607 
    602608      ! send back pout from todelay(idvar)%z1d defined at previous call 
     
    604610 
    605611      ! send p_in into todelay(idvar)%z1d with a non-blocking communication 
     612      ! (PM) Should we get rid of MPI2 option ? MPI3 was release in 2013. Who is still using MPI2 ? 
    606613# if defined key_mpi2 
    607614      IF( ln_timing ) CALL tic_tac( .TRUE., ld_global = .TRUE.) 
    608       CALL  mpi_allreduce( p_in(:), todelay(idvar)%z1d(:), isz, MPI_TYPE, mpi_max, ilocalcomm, ndelayid(idvar), ierr ) 
     615      CALL  mpi_allreduce( p_in(:), todelay(idvar)%z1d(:), isz, MPI_TYPE, mpi_max, ilocalcomm, ierr ) 
    609616      IF( ln_timing ) CALL tic_tac(.FALSE., ld_global = .TRUE.) 
    610617# else 
     
    629636      !!---------------------------------------------------------------------- 
    630637#if defined key_mpp_mpi 
    631       IF( ndelayid(kid) /= -2 ) THEN   
    632 #if ! defined key_mpi2 
    633          IF( ln_timing ) CALL tic_tac( .TRUE., ld_global = .TRUE.) 
    634          CALL mpi_wait( ndelayid(kid), MPI_STATUS_IGNORE, ierr )                        ! make sure todelay(kid) is received 
    635          IF( ln_timing ) CALL tic_tac(.FALSE., ld_global = .TRUE.) 
    636 #endif 
    637          IF( ASSOCIATED(todelay(kid)%y1d) )   todelay(kid)%z1d(:) = REAL(todelay(kid)%y1d(:), wp)  ! define %z1d from %y1d 
    638          ndelayid(kid) = -2   ! add flag to know that mpi_wait was already called on kid 
    639       ENDIF 
     638      IF( ln_timing ) CALL tic_tac( .TRUE., ld_global = .TRUE.) 
     639      ! test on ndelayid(kid) useless as mpi_wait return immediatly if the request handle is MPI_REQUEST_NULL 
     640      CALL mpi_wait( ndelayid(kid), MPI_STATUS_IGNORE, ierr ) ! after this ndelayid(kid) = MPI_REQUEST_NULL 
     641      IF( ln_timing ) CALL tic_tac( .FALSE., ld_global = .TRUE.) 
     642      IF( ASSOCIATED(todelay(kid)%y1d) )   todelay(kid)%z1d(:) = REAL(todelay(kid)%y1d(:), wp)  ! define %z1d from %y1d 
    640643#endif 
    641644   END SUBROUTINE mpp_delay_rcv 
     
    10981101      ! Look for how many procs on the northern boundary 
    10991102      ndim_rank_north = 0 
    1100       DO jjproc = 1, jpnij 
    1101          IF( njmppt(jjproc) == njmppmax )   ndim_rank_north = ndim_rank_north + 1 
     1103      DO jjproc = 1, jpni 
     1104         IF( nfproc(jjproc) /= -1 )   ndim_rank_north = ndim_rank_north + 1 
    11021105      END DO 
    11031106      ! 
     
    11091112      ! Note : the rank start at 0 in MPI 
    11101113      ii = 0 
    1111       DO ji = 1, jpnij 
    1112          IF ( njmppt(ji) == njmppmax   ) THEN 
     1114      DO ji = 1, jpni 
     1115         IF ( nfproc(ji) /= -1   ) THEN 
    11131116            ii=ii+1 
    1114             nrank_north(ii)=ji-1 
     1117            nrank_north(ii)=nfproc(ji) 
    11151118         END IF 
    11161119      END DO 
Note: See TracChangeset for help on using the changeset viewer.