Changeset 10180 for NEMO/branches/2018/dev_r10164_HPC09_ESIWACE_PREP_MERGE/src/OCE/LBC/lib_mpp.F90

Timestamp: 2018-10-08T17:06:04+02:00
Author:    smasson
Message:   dev_r10164_HPC09_ESIWACE_PREP_MERGE: action 4b: reduce communications in si3, see #2133

File: 1 edited

  • NEMO/branches/2018/dev_r10164_HPC09_ESIWACE_PREP_MERGE/src/OCE/LBC/lib_mpp.F90

--- lib_mpp.F90 (r10172)
+++ lib_mpp.F90 (r10180)
@@ -85,5 +85,4 @@
    PUBLIC   mpp_max_multiple
    PUBLIC   mppscatter, mppgather
-   PUBLIC   mpp_ini_ice, mpp_ini_znl
+   PUBLIC   mpp_ini_znl
    PUBLIC   mppsize
    PUBLIC   mppsend, mpprecv                          ! needed by TAM and ICB routines
     
@@ -133,12 +133,4 @@
 
    INTEGER :: MPI_SUMDD
-
-   ! variables used in case of sea-ice
-   INTEGER, PUBLIC ::   ncomm_ice       !: communicator made by the processors with sea-ice (public so that it can be freed in icethd)
-   INTEGER         ::   ngrp_iworld     !  group ID for the world processors (for rheology)
-   INTEGER         ::   ngrp_ice        !  group ID for the ice processors (for rheology)
-   INTEGER         ::   ndim_rank_ice   !  number of 'ice' processors
-   INTEGER         ::   n_ice_root      !  number (in the comm_ice) of proc 0 in the ice comm
-   INTEGER, DIMENSION(:), ALLOCATABLE, SAVE ::   nrank_ice     ! dimension ndim_rank_ice
 
    ! variables used for zonal integration
     
@@ -1011,87 +1003,4 @@
 
 
-   SUBROUTINE mpp_ini_ice( pindic, kumout )
-      !!----------------------------------------------------------------------
-      !!               ***  routine mpp_ini_ice  ***
-      !!
-      !! ** Purpose :   Initialize special communicator for ice areas
-      !!      condition together with global variables needed in the ddmpp folding
-      !!
-      !! ** Method  : - Look for ice processors in ice routines
-      !!              - Put their number in nrank_ice
-      !!              - Create groups for the world processors and the ice processors
-      !!              - Create a communicator for ice processors
-      !!
-      !! ** output
-      !!      njmppmax = njmpp for northern procs
-      !!      ndim_rank_ice = number of processors with ice
-      !!      nrank_ice (ndim_rank_ice) = ice processors
-      !!      ngrp_iworld = group ID for the world processors
-      !!      ngrp_ice = group ID for the ice processors
-      !!      ncomm_ice = communicator for the ice procs.
-      !!      n_ice_root = number (in the world) of proc 0 in the ice comm.
-      !!
-      !!----------------------------------------------------------------------
-      INTEGER, INTENT(in) ::   pindic
-      INTEGER, INTENT(in) ::   kumout   ! ocean.output logical unit
-      !!
-      INTEGER :: jjproc
-      INTEGER :: ii, ierr
-      INTEGER, ALLOCATABLE, DIMENSION(:) ::   kice
-      INTEGER, ALLOCATABLE, DIMENSION(:) ::   zwork
-      !!----------------------------------------------------------------------
-      !
-      ALLOCATE( kice(jpnij), zwork(jpnij), STAT=ierr )
-      IF( ierr /= 0 ) THEN
-         WRITE(kumout, cform_err)
-         WRITE(kumout,*) 'mpp_ini_ice : failed to allocate 2, 1D arrays (jpnij in length)'
-         CALL mppstop
-      ENDIF
-
-      ! Look for how many procs with sea-ice
-      !
-      kice = 0
-      DO jjproc = 1, jpnij
-         IF( jjproc == narea .AND. pindic .GT. 0 )   kice(jjproc) = 1
-      END DO
-      !
-      zwork = 0
-      CALL MPI_ALLREDUCE( kice, zwork, jpnij, mpi_integer, mpi_sum, mpi_comm_oce, ierr )
-      ndim_rank_ice = SUM( zwork )
-
-      ! Allocate the right size to nrank_north
-      IF( ALLOCATED ( nrank_ice ) )   DEALLOCATE( nrank_ice )
-      ALLOCATE( nrank_ice(ndim_rank_ice) )
-      !
-      ii = 0
-      nrank_ice = 0
-      DO jjproc = 1, jpnij
-         IF( zwork(jjproc) == 1) THEN
-            ii = ii + 1
-            nrank_ice(ii) = jjproc -1
-         ENDIF
-      END DO
-
-      ! Create the world group
-      CALL MPI_COMM_GROUP( mpi_comm_oce, ngrp_iworld, ierr )
-
-      ! Create the ice group from the world group
-      CALL MPI_GROUP_INCL( ngrp_iworld, ndim_rank_ice, nrank_ice, ngrp_ice, ierr )
-
-      ! Create the ice communicator , ie the pool of procs with sea-ice
-      CALL MPI_COMM_CREATE( mpi_comm_oce, ngrp_ice, ncomm_ice, ierr )
-
-      ! Find proc number in the world of proc 0 in the north
-      ! The following line seems to be useless, we just comment & keep it as reminder
-      ! CALL MPI_GROUP_TRANSLATE_RANKS(ngrp_ice,1,0,ngrp_iworld,n_ice_root,ierr)
-      !
-      CALL MPI_GROUP_FREE(ngrp_ice, ierr)
-      CALL MPI_GROUP_FREE(ngrp_iworld, ierr)
-
-      DEALLOCATE(kice, zwork)
-      !
-   END SUBROUTINE mpp_ini_ice
-
-
    SUBROUTINE mpp_ini_znl( kumout )
       !!----------------------------------------------------------------------
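The header of the deleted routine documents the standard MPI group-subsetting pattern: each rank flags its membership, the flags are summed with MPI_ALLREDUCE, the member ranks are collected into a list, and a group and communicator are derived from that list with MPI_GROUP_INCL and MPI_COMM_CREATE. Below is a minimal self-contained sketch of that pattern, not NEMO code; the program name, the has_ice flag and the even-rank membership criterion are illustrative assumptions.

   PROGRAM ice_comm_sketch
      USE mpi
      IMPLICIT NONE
      INTEGER :: ierr, nproc, narea, jjproc, ii, ndim_rank_ice
      INTEGER :: ngrp_world, ngrp_ice, ncomm_ice
      LOGICAL :: has_ice                        ! hypothetical per-rank membership criterion
      INTEGER, ALLOCATABLE :: kice(:), zwork(:), nrank_ice(:)
      !
      CALL MPI_INIT( ierr )
      CALL MPI_COMM_SIZE( MPI_COMM_WORLD, nproc, ierr )
      CALL MPI_COMM_RANK( MPI_COMM_WORLD, narea, ierr )
      !
      has_ice = MOD( narea, 2 ) == 0            ! stand-in for "this subdomain has sea-ice"
      !
      ! step 1: every rank flags its own slot, then the flags are summed globally
      ALLOCATE( kice(nproc), zwork(nproc) )
      kice = 0
      IF( has_ice )   kice(narea+1) = 1
      CALL MPI_ALLREDUCE( kice, zwork, nproc, MPI_INTEGER, MPI_SUM, MPI_COMM_WORLD, ierr )
      ndim_rank_ice = SUM( zwork )
      !
      ! step 2: collect the member ranks into a list
      ALLOCATE( nrank_ice(ndim_rank_ice) )
      ii = 0
      DO jjproc = 1, nproc
         IF( zwork(jjproc) == 1 ) THEN
            ii = ii + 1
            nrank_ice(ii) = jjproc - 1          ! MPI ranks are zero-based
         ENDIF
      END DO
      !
      ! step 3: build a group from the list, then a communicator from the group
      CALL MPI_COMM_GROUP( MPI_COMM_WORLD, ngrp_world, ierr )
      CALL MPI_GROUP_INCL( ngrp_world, ndim_rank_ice, nrank_ice, ngrp_ice, ierr )
      CALL MPI_COMM_CREATE( MPI_COMM_WORLD, ngrp_ice, ncomm_ice, ierr )
      ! groups can be freed as soon as the communicator exists
      CALL MPI_GROUP_FREE( ngrp_ice, ierr )
      CALL MPI_GROUP_FREE( ngrp_world, ierr )
      !
      IF( ncomm_ice /= MPI_COMM_NULL )   CALL MPI_COMM_FREE( ncomm_ice, ierr )
      CALL MPI_FINALIZE( ierr )
   END PROGRAM ice_comm_sketch

Note that MPI_COMM_CREATE must be called by every rank of the parent communicator; ranks outside the group simply receive MPI_COMM_NULL.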
     
@@ -1662,5 +1571,4 @@
    LOGICAL, PUBLIC, PARAMETER ::   lk_mpp = .FALSE.      !: mpp flag
    LOGICAL, PUBLIC            ::   ln_nnogather          !: namelist control of northfold comms (needed here in case "key_mpp_mpi" is not used)
-   INTEGER :: ncomm_ice
    INTEGER, PUBLIC            ::   mpi_comm_oce          ! opa local communicator
    !!----------------------------------------------------------------------
     
@@ -1815,9 +1723,4 @@
       STOP      ! non MPP case, just stop the run
    END SUBROUTINE mppstop
-
-   SUBROUTINE mpp_ini_ice( kcom, knum )
-      INTEGER :: kcom, knum
-      WRITE(*,*) 'mpp_ini_ice: You should not have seen this print! error?', kcom, knum
-   END SUBROUTINE mpp_ini_ice
 
    SUBROUTINE mpp_ini_znl( knum )
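A related detail from the declaration hunk above: ncomm_ice was PUBLIC "so that it can be freed in icethd". Because ranks outside the ice group hold MPI_COMM_NULL, both use and release of such a subset communicator must be guarded on membership. A hedged sketch of that guard, with a hypothetical routine name and arguments (zice_loc, zice_glo) that are not from the source:

   SUBROUTINE ice_sum_and_free( kcomm_ice, zice_loc, zice_glo )
      USE mpi
      IMPLICIT NONE
      INTEGER         , INTENT(inout) ::   kcomm_ice   ! subset communicator, MPI_COMM_NULL on non-member ranks
      DOUBLE PRECISION, INTENT(in   ) ::   zice_loc    ! illustrative local contribution
      DOUBLE PRECISION, INTENT(  out) ::   zice_glo    ! sum over the ice ranks only
      INTEGER :: ierr
      zice_glo = 0.d0
      IF( kcomm_ice /= MPI_COMM_NULL ) THEN
         CALL MPI_ALLREDUCE( zice_loc, zice_glo, 1, MPI_DOUBLE_PRECISION, MPI_SUM, kcomm_ice, ierr )
         CALL MPI_COMM_FREE( kcomm_ice, ierr )   ! resets the handle to MPI_COMM_NULL
      ENDIF
   END SUBROUTINE ice_sum_and_free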