MODULE cpl_oasis4
   !!======================================================================
   !!                    ***  MODULE cpl_oasis4  ***
   !! Coupled O/A : coupled ocean-atmosphere case using OASIS4
   !!               special case: OPA/LIM coupled to ECHAM5
   !!=====================================================================
   !! History :
   !!   9.0  !  04-06  (R. Redler, NEC Laboratories Europe, St Augustin, Germany) Original code
   !!   " "  !  04-11  (R. Redler, NEC Laboratories Europe; N. Keenlyside, W. Park, IFM-GEOMAR, Kiel, Germany) revision
   !!   " "  !  04-11  (V. Gayler, MPI M&D) Grid writing
   !!   " "  !  05-08  (R. Redler, W. Park) frld initialization, paral(2) revision
   !!   " "  !  05-09  (R. Redler) extended to allow for communication over root only
   !!   " "  !  05-12  (R. Hill, Met. Office) Tweaks and hacks to get NEMO/O4 working
   !!   " "  !  06-02  (R. Redler, W. Park) Bug fixes and updates according to the OASIS3 interface
   !!   " "  !  06-02  (R. Redler) app/grid/grid_name from namelist
   !!----------------------------------------------------------------------
#if defined key_oasis4
   !!----------------------------------------------------------------------
   !!   'key_oasis4'                    coupled Ocean/Atmosphere via OASIS4
   !!----------------------------------------------------------------------
   !!   cpl_prism_init     : initialization of coupled mode communication
   !!   cpl_prism_define   : definition of grid and fields
   !!   cpl_prism_send     : send out fields in coupled mode
   !!   cpl_prism_recv     : receive fields in coupled mode
   !!   cpl_prism_finalize : finalize the coupled mode communication
   !!----------------------------------------------------------------------
   !! * Modules used
   !##################### WARNING coupled mode ###############################
   !##################### WARNING coupled mode ###############################
   !   Following line must be enabled if coupling with OASIS
   !   USE prism                        ! prism module
   !##################### WARNING coupled mode ###############################
   !##################### WARNING coupled mode ###############################
#if defined key_mpp_mpi
   USE lib_mpp, only : mppsize, mpprank ! message passing
   USE lib_mpp, only : mppsend          ! message passing
   USE lib_mpp, only : mpprecv          ! message passing
#endif
   USE dom_oce                          ! ocean space and time domain
   USE in_out_manager                   ! I/O manager
   USE par_oce                          !
   USE phycst, only : rt0               ! freezing point of sea water
   USE oasis4_date                      ! OASIS4 date declarations in
                                        ! PRISM compatible format
   IMPLICIT NONE
   !
   ! Exchange parameters for coupling ORCA-LIM with ECHAM5
   !
#if defined key_cpl_ocevel
   INTEGER, PARAMETER         :: nsend =  6
#else
   INTEGER, PARAMETER         :: nsend =  4
#endif
#if defined key_cpl_discharge
   INTEGER, PARAMETER         :: nrecv = 20
#else
   INTEGER, PARAMETER         :: nrecv = 17
#endif

   INTEGER, DIMENSION(nsend)  :: send_id
   INTEGER, DIMENSION(nrecv)  :: recv_id

   CHARACTER(len=32)          :: cpl_send (nsend)
   CHARACTER(len=32)          :: cpl_recv (nrecv)

   CHARACTER(len=16)          :: app_name    ! application name for OASIS use
   CHARACTER(len=16)          :: comp_name   ! name of this PRISM component
   CHARACTER(len=16)          :: grid_name   ! name of the grid
   CHARACTER(len=1)           :: c_mpi_send

   ! The following now come in via the new module oasis4_date
   ! TYPE(PRISM_Time_struct), PUBLIC :: dates          ! date info for send operation
   ! TYPE(PRISM_Time_struct), PUBLIC :: dates_bound(2) ! date info for send operation
   ! TYPE(PRISM_Time_struct), PUBLIC :: dater          ! date info for receive operation
   ! TYPE(PRISM_Time_struct), PUBLIC :: dater_bound(2) ! date info for receive operation
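   !
   ! An illustrative &nam_mpp namelist block matching the four names read in
   ! cpl_prism_init below. The values shown are examples only, not defaults:
   !
   !    &nam_mpp
   !       app_name   = 'ogcm'           ! application name registered with OASIS4
   !       comp_name  = 'opa'            ! PRISM component name
   !       c_mpi_send = 'S'              ! MPI send mode flag (value illustrative)
   !       grid_name  = 'orca_tripolar'  ! must match the SMIOC/SCC configuration
   !    /
   !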
   TYPE(PRISM_Time_struct), PUBLIC :: tmpdate

   PRIVATE

   INTEGER, PARAMETER         :: localRoot  = 0
   INTEGER                    :: localRank      ! local MPI rank
   INTEGER                    :: localSize      ! local MPI size
   INTEGER                    :: localComm      ! local MPI communicator
   LOGICAL                    :: commRank       ! true for ranks doing OASIS communication
   INTEGER                    :: comp_id        ! id returned by prism_init_comp

   INTEGER                    :: range(5)

   LOGICAL, SAVE              :: prism_was_initialized
   LOGICAL, SAVE              :: prism_was_terminated
   INTEGER, SAVE              :: write_grid

   INTEGER                    :: ierror         ! return error code

#if defined key_cpl_rootexchg
   LOGICAL                    :: rootexchg = .true.   ! logical switch
#else
   LOGICAL                    :: rootexchg = .false.  ! logical switch
#endif

   REAL(wp), DIMENSION(:,:), ALLOCATABLE :: exfld   ! temporary buffer for exchange
   REAL(wp), DIMENSION(:),   ALLOCATABLE :: buffer  ! temporary buffer for exchange
   INTEGER,  DIMENSION(:,:), ALLOCATABLE :: ranges  ! temporary buffer for exchange
   DOUBLE PRECISION           :: date_incr

   !! Routine accessibility
   PUBLIC cpl_prism_init
   PUBLIC cpl_prism_define
   PUBLIC cpl_prism_send
   PUBLIC cpl_prism_recv
   PUBLIC cpl_prism_finalize

   PUBLIC send_id, recv_id

   !!----------------------------------------------------------------------
   !!   OPA 9.0 , LOCEAN-IPSL (2006)
   !!   $Id$
   !! Software governed by the CeCILL licence (modipsl/doc/NEMO_CeCILL.txt)
   !!----------------------------------------------------------------------

CONTAINS

   SUBROUTINE cpl_prism_init( localCommunicator )

      IMPLICIT NONE

      !!-------------------------------------------------------------------
      !!             ***  ROUTINE cpl_prism_init  ***
      !!
      !! ** Purpose :   Initialize coupled mode communication for ocean
      !!    exchange between AGCM, OGCM and COUPLER. (OASIS4 software)
      !!
      !! ** Method  :   OASIS4 MPI communication
      !!--------------------------------------------------------------------
      !! * Arguments
      !!
      INTEGER, INTENT(OUT)       :: localCommunicator
      !!
      !! * Local declarations
      !!
      NAMELIST/nam_mpp/ app_name, comp_name, c_mpi_send, grid_name
      !!
      !!--------------------------------------------------------------------
      !!
      IF(lwp) WRITE(numout,*)
      IF(lwp) WRITE(numout,*) 'cpl_prism_init : initialization in coupled ocean/atmosphere case'
      IF(lwp) WRITE(numout,*) '~~~~~~~~~~~~~~~~'
      IF(lwp) WRITE(numout,*)

#if defined key_flx_bulk_monthly || defined key_flx_bulk_daily || defined key_flx_forced_daily
#if !defined key_oasis4
      ! The following is not necessarily a valid piece of checking
      IF(lwp) WRITE(numout,cform_err)
      IF(lwp) WRITE(numout,*) ' key_coupled and key_flx_bulk_* or key_flx_forced_daily are incompatible'
      nstop = nstop + 1
#endif
#endif

      REWIND( numnam )
      READ  ( numnam, nam_mpp )
      REWIND( numnam )

      !------------------------------------------------------------------
      ! 1st Initialize the PRISM system for the application
      !------------------------------------------------------------------

      CALL prism_initialized (prism_was_initialized, ierror)
      IF ( ierror /= PRISM_Success ) &
         CALL prism_abort( comp_id, 'OPA9.0', 'cpl_prism_init: Failure in prism_initialized' )

      IF ( .NOT. prism_was_initialized ) THEN
         CALL prism_init( app_name, ierror )
         IF ( ierror /= PRISM_Success ) &
            CALL prism_abort(comp_id, 'OPA9.0', 'cpl_prism_init: Failure in prism_init')
         prism_was_initialized = .true.
      ELSE
         CALL prism_abort(comp_id, 'OPA9.0', 'cpl_prism_init: Do not initialize prism twice!')
      ENDIF
      !
      ! Obtain the actual dates and date bounds
      !
      ! The date is determined by adding the days since the beginning of
      ! the run to the corresponding initial date. Note that OPA's internal
      ! info about the start date of the experiment is bypassed. Instead we
      ! rely solely on the info provided by the SCC.xml file.
      !
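      ! A worked example of the bounds arithmetic below (values illustrative):
      ! with rdttra(1) = 3600 s the upper bound is the start date plus 1800 s
      ! (half a time step). For the lower bound, adatrj - adatrj0 is elapsed
      ! time in days, so the increment passed to PRISM_calc_newdate is
      ! (adatrj - adatrj0) * 43200 s, i.e. half the elapsed interval in
      ! seconds (43200 = 86400/2 seconds per day).
      !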
      dates = PRISM_Jobstart_date

      WRITE(6,*) "PRISM JOB START DATE IS", dates

      !
      ! The upper bound is determined by adding half a time step
      !
      tmpdate   = dates
      date_incr = rdttra(1)/2.0
      CALL PRISM_calc_newdate ( tmpdate, date_incr, ierror )
      dates_bound(2) = tmpdate
      !
      ! The lower bound is determined by half the distance to the date of the previous run
      !
      tmpdate   = dates
      date_incr = ( adatrj - adatrj0 ) * 43200.0
      CALL PRISM_calc_newdate ( tmpdate, date_incr, ierror )
      dates_bound(1) = tmpdate

      dater = dates
      dater_bound(1) = dates_bound(1)
      dater_bound(2) = dates_bound(2)

      WRITE(6,*) "DATE send and rec BOUNDS", dater_bound
      WRITE(6,*) "OTHER BITS FOR DATE", rdttra(1)
      WRITE(6,*) "adatrj/0", adatrj, adatrj0, date_incr

      !------------------------------------------------------------------
      ! 2nd Initialize the PRISM system for the component
      !------------------------------------------------------------------

      CALL prism_init_comp ( comp_id, comp_name, ierror )
      IF ( ierror /= PRISM_Success ) &
         CALL prism_abort (comp_id, 'OPA9.0', 'cpl_prism_init: Failure in prism_init_comp')

      WRITE(6,*) "COMPLETED INIT_COMP", comp_name, comp_id

      !------------------------------------------------------------------
      ! 3rd Get an MPI communicator for OPA local communication
      !------------------------------------------------------------------

      CALL prism_get_localcomm ( comp_id, localComm, ierror )
      IF ( ierror /= PRISM_Success ) &
         CALL prism_abort (comp_id, 'OPA9.0', 'cpl_prism_init: Failure in prism_get_localcomm' )

      localCommunicator = localComm

      WRITE(6,*) "COMPLETED GET_LOCALCOMM", comp_name, comp_id

   END SUBROUTINE cpl_prism_init
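
   ! A minimal calling sequence, for orientation only. The driver-level names
   ! ilocal_comm, isec, zfld and zsst are hypothetical, not part of this module:
   !
   !    CALL cpl_prism_init( ilocal_comm )   ! before any other use of MPI
   !    ...                                  ! domain decomposition set up
   !    CALL cpl_prism_define()              ! grids, masks and transients
   !    DO kt = nit000, nitend
   !       CALL cpl_prism_recv( recv_id(1), isec, zfld, info )
   !       CALL cpl_prism_send( send_id(1), isec, zsst, info )
   !    END DO
   !    CALL cpl_prism_finalize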

   SUBROUTINE cpl_prism_define ()

      IMPLICIT NONE

      !!-------------------------------------------------------------------
      !!             ***  ROUTINE cpl_prism_define  ***
      !!
      !! ** Purpose :   Define grid and field information for ocean
      !!    exchange between AGCM, OGCM and COUPLER. (OASIS4 software)
      !!
      !! ** Method  :   OASIS4 MPI communication
      !!--------------------------------------------------------------------
      !! * Arguments
      !!
      !! * Local declarations

      INTEGER                    :: grid_id(2)    ! id returned by prism_def_grid
      INTEGER                    :: upoint_id(2), &
                                    vpoint_id(2), &
                                    tpoint_id(2), &
                                    fpoint_id(2)  ! ids returned by prism_set_points

      INTEGER                    :: umask_id(2), &
                                    vmask_id(2), &
                                    tmask_id(2), &
                                    fmask_id(2)   ! ids returned by prism_set_mask

      INTEGER                    :: grid_type     ! PRISM grid type

      INTEGER                    :: shape(2,3)    ! shape of arrays passed to PSMILe
      INTEGER                    :: nodim(2)
      INTEGER                    :: data_type     ! data type of transients

      INTEGER                    :: nbr_corners

      LOGICAL                    :: new_points
      LOGICAL                    :: new_mask
      LOGICAL                    :: mask(jpi,jpj,jpk)

      INTEGER                    :: ji, jj, jk    ! local loop indices
      INTEGER                    :: maxlen        ! max buffer size over all ranks

      ! Note: the module-level cpl_send, cpl_recv and grid_name are used here;
      ! re-declaring them locally would shadow the namelist-provided grid_name
      ! and leave the field names undefined in cpl_prism_send/recv.

      CHARACTER(len=32)          :: point_name    ! name of the grid points

      REAL(kind=wp), ALLOCATABLE :: rclon(:,:,:)
      REAL(kind=wp), ALLOCATABLE :: rclat(:,:,:)
      REAL(kind=wp), ALLOCATABLE :: rcz  (:,:)

      !!--------------------------------------------------------------------

      IF(lwp) WRITE(numout,*)
      IF(lwp) WRITE(numout,*) 'cpl_prism_define : initialization in coupled ocean/atmosphere case'
      IF(lwp) WRITE(numout,*) '~~~~~~~~~~~~~~~~~'
      IF(lwp) WRITE(numout,*)

#if defined key_flx_bulk_monthly || defined key_flx_bulk_daily || defined key_flx_forced_daily
#if !defined key_oasis4
      ! The problem with the following is that it ASSUMES we're only ever coupling
      ! to an atmosphere, which is not necessarily the case. Prevent this test
      ! temporarily for NEMOGAM development.
      IF(lwp) WRITE(numout,cform_err)
      IF(lwp) WRITE(numout,*) ' key_coupled and key_flx_bulk_... are incompatible'
      nstop = nstop + 1
#endif
#endif

      ! -----------------------------------------------------------------
      ! ... Some initialisation
      ! -----------------------------------------------------------------

      send_id = 0
      recv_id = 0

#if defined key_mpp_mpi

      ! -----------------------------------------------------------------
      ! ... Some MPI stuff relevant for optional exchange via root only
      ! -----------------------------------------------------------------

      commRank = .false.

      localRank = mpprank ! from lib_mpp
      localSize = mppsize ! from lib_mpp

      IF(lwp) WRITE(numout,*) "CALLING DEFINE"

      IF ( rootexchg ) THEN
         IF ( localRank == localRoot ) commRank = .true.
      ELSE
         commRank = .true.
      ENDIF

#else
      !
      ! For non-parallel configurations the one and only process ("localRoot")
      ! takes part in the communication
      !
      localRank = localRoot
      commRank  = .true.

#endif

      ! -----------------------------------------------------------------
      ! ... Allocate memory for data exchange
      ! -----------------------------------------------------------------

      IF(lwp) WRITE(numout,*) "About to allocate exfld", jpi, jpj

      ALLOCATE(exfld(1:jpi,1:jpj), stat = ierror)
      IF (ierror > 0) THEN
         CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in allocating Reals')
         RETURN
      ENDIF

      IF ( rootexchg .and. localRank == localRoot ) THEN
         ALLOCATE(ranges(5,0:localSize-1), stat = ierror)
         IF (ierror > 0) THEN
            CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in allocating Integer')
            RETURN
         ENDIF
      ENDIF

      !------------------------------------------------------------------
      ! 1st Declare the local grid (ORCA tripolar) characteristics for
      !     surface coupling. The halo regions must be excluded. For
      !     surface coupling it is sufficient to specify only one
      !     vertical z-level.
      !------------------------------------------------------------------

      grid_type = PRISM_irrlonlat_regvrt

      IF(lwp) WRITE(numout,*) "Set grid type"

      ! -----------------------------------------------------------------
      ! ... Define the shape of the valid region without the halo.
      !     For serial configurations (key_mpp_mpi not being active)
      !     nl* is set to the global values 1 and jp*glo.
      ! -----------------------------------------------------------------

      IF ( rootexchg ) THEN
         shape(1,1) = 1+jpreci
         shape(2,1) = jpiglo-jpreci
         shape(1,2) = 1+jpreci
         shape(2,2) = jpjglo-jpreci
         shape(1,3) = 1
         shape(2,3) = 1
      ELSE
         shape(1,1) = 1+jpreci
         shape(2,1) = jpi-jpreci
         shape(1,2) = 1+jpreci
         shape(2,2) = jpj-jpreci
         shape(1,3) = 1
         shape(2,3) = 1
      ENDIF

      IF(lwp) WRITE(numout,*) "commrank is", commRank

      IF ( commRank ) THEN

         IF(lwp) WRITE(numout,*) "CALLING DEF_GRID"
         IF(lwp) WRITE(numout,*) "grid name", grid_name
         IF(lwp) WRITE(numout,*) "    shape", shape
         IF(lwp) WRITE(numout,*) "grid type", grid_type

         CALL prism_def_grid ( grid_id(1), grid_name, comp_id, shape, &
              grid_type, ierror )

         IF ( ierror /= PRISM_Success ) THEN
            PRINT *, 'OPA cpl_prism_define: Failure in prism_def_grid'
            CALL prism_abort (comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_def_grid')
         ENDIF

         !------------------------------------------------------------------
         ! 2nd Declare the geometric information for this grid.
         !------------------------------------------------------------------

         ! -----------------------------------------------------------------
         ! ... Redefine shape, which may now include the halo region as well.
         ! -----------------------------------------------------------------

         shape(1,1) = 1
         shape(2,1) = jpi
         shape(1,2) = 1
         shape(2,2) = jpj
         shape(1,3) = 1
         shape(2,3) = 1

         IF(lwp) WRITE(numout,*) "redefined shape", shape
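
         ! Corner-numbering convention assumed by the loops below, deduced from
         ! which glam/gphi arrays feed each corner (sketch for one T cell):
         !
         !        3 (upper-left) ---------- 2 (upper-right)
         !              |             T           |
         !        4 (lower-left) ---------- 1 (lower-right)
         !
         ! Boundary corners are mirrored about the cell centre. For example
         ! (values illustrative), with gphit = -76.5 and an upper corner at
         ! -76.0, the mirrored lower corner is 2.0*(-76.5) - (-76.0) = -77.0.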

         ! -----------------------------------------------------------------
         ! ... Define the elements, i.e. specify the corner points for each
         !     volume element. In case OPA runs on level coordinates (regular
         !     in the vertical) we only need to give the 4 horizontal corners
         !     for a volume element plus the vertical position of the upper
         !     and lower face. Nevertheless the volume element has 8 corners.
         ! -----------------------------------------------------------------

         !
         ! ... Treat corners in the horizontal plane
         !
         ALLOCATE(rclon(shape(1,1):shape(2,1),shape(1,2):shape(2,2),4), &
              STAT=ierror)
         IF ( ierror /= 0 ) &
            CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: error in allocate for rclon')

         ALLOCATE(rclat(shape(1,1):shape(2,1),shape(1,2):shape(2,2),4), &
              STAT=ierror)
         IF ( ierror /= 0 ) &
            CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: error in allocate for rclat')

         nbr_corners = 8

         !
         ! ... Set the right longitudes and the upper latitudes
         !
         DO jj = shape(1,2), shape(2,2)
            DO ji = shape(1,1), shape(2,1)
               rclon(ji,jj,1) = glamu(ji,jj)
               rclon(ji,jj,2) = glamu(ji,jj)
               rclat(ji,jj,2) = gphiv(ji,jj)
               rclat(ji,jj,3) = gphiv(ji,jj)
            ENDDO
         ENDDO

         !
         ! ... Set the lower latitudes from the upper latitudes of the
         !     southern neighbour
         !
         DO jj = shape(1,2)+1, shape(2,2)
            DO ji = shape(1,1), shape(2,1)
               rclat(ji,jj,1) = rclat(ji,jj-1,2)
               rclat(ji,jj,4) = rclat(ji,jj-1,3)
            ENDDO
         ENDDO

         !
         ! ... Set the left longitudes from the right longitudes of the
         !     western neighbour
         !
         DO jj = shape(1,2), shape(2,2)
            DO ji = shape(1,1)+1, shape(2,1)
               rclon(ji,jj,3) = rclon(ji-1,jj,2)
               rclon(ji,jj,4) = rclon(ji-1,jj,1)
            ENDDO
         ENDDO

         !
         ! ... Set the lowermost latitudes by mirroring the upper corners
         !     about the cell centre
         !
         DO jj = shape(1,2), shape(1,2)
            DO ji = shape(1,1), shape(2,1)
               rclat(ji,jj,1) = 2.0*gphit(ji,jj) - rclat(ji,jj,2)
               rclat(ji,jj,4) = 2.0*gphit(ji,jj) - rclat(ji,jj,3)
            ENDDO
         ENDDO

         !
         ! ... Set the leftmost longitudes by mirroring the right corners
         !     about the cell centre
         !
         DO jj = shape(1,2), shape(2,2)
            DO ji = shape(1,1), shape(1,1)
               rclon(ji,jj,3) = 2.0*glamt(ji,jj) - rclon(ji,jj,2)
               rclon(ji,jj,4) = 2.0*glamt(ji,jj) - rclon(ji,jj,1)

               WRITE(76,*) "rclon", ji, jj, rclon(ji,jj,1), &
                                            rclon(ji,jj,2), &
                                            rclon(ji,jj,3), &
                                            rclon(ji,jj,4)

               WRITE(76,*) "rclat", ji, jj, rclat(ji,jj,1), &
                                            rclat(ji,jj,2), &
                                            rclat(ji,jj,3), &
                                            rclat(ji,jj,4)
            ENDDO
         ENDDO

         !
         ! ... Treat corners along the vertical axis
         !
         ALLOCATE(rcz(shape(1,3):shape(2,3),2), STAT=ierror)
         IF ( ierror /= 0 ) &
            CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: error in allocate for rcz')

         DO jk = shape(1,3), shape(2,3)
            rcz(jk,1) = gdepw(jk)
            rcz(jk,2) = gdepw(jk+1)
         ENDDO

         IF(lwp) WRITE(numout,*) "ABOUT TO CALL SET CORNERS", shape

         CALL prism_set_corners ( grid_id(1), nbr_corners, shape, rclon, rclat, &
              rcz, ierror)
         IF ( ierror /= PRISM_Success ) &
            CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_corners')

         DEALLOCATE(rclon, rclat, rcz)

         ! -----------------------------------------------------------------
         ! ... Define the gridpoints
         ! -----------------------------------------------------------------

         new_points = .TRUE.

         IF(lwp) WRITE(numout,*) "CALLING SET_POINTS"

         !
         ! ... the u-points
         !
         point_name = 'u-points'
         CALL prism_set_points ( upoint_id(1), point_name, grid_id(1), shape, &
              glamu, gphiu, gdept(shape(1,3):shape(2,3)), new_points, ierror )
         IF ( ierror /= PRISM_Success ) &
            CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_points upoint_id')

         !
         ! ... the v-points
         !
IF(lwp) WRITE(numout,*) "CALLING SET_POINTS done u doing v" point_name = 'v-points' CALL prism_set_points ( vpoint_id(1), point_name, grid_id(1), shape, & glamv, gphiv, gdept(shape(1,3):shape(2,3)), new_points, ierror ) IF ( ierror /= PRISM_Success ) & CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_points vpoint_id') ! ! ... the t-points ! ! WRITE(76,*) 'CALLING T POINTS', shape ! WRITE(77,*) 'glamt', glamt ! WRITE(78,*) 'gphit', gphit ! point_name = 't-points' CALL prism_set_points ( tpoint_id(1), point_name, grid_id(1), shape, & glamt, gphit, gdept(shape(1,3):shape(2,3)), new_points, ierror ) IF ( ierror /= PRISM_Success ) & CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_points tpoint_id') ! ! ... the f-points ! point_name = 'f-points' CALL prism_set_points ( fpoint_id(1), point_name, grid_id(1), shape, & glamf, gphif, gdept(shape(1,3):shape(2,3)), new_points, ierror ) IF ( ierror /= PRISM_Success ) & CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_points fpoint_id') IF(lwp) WRITE(numout,*) "CALLING SET_POINTS done f" ! ----------------------------------------------------------------- ! ... Convert OPA masks to logicals and define the masks ! ----------------------------------------------------------------- new_mask = .true. mask = (umask == 1) CALL prism_set_mask (umask_id(1), grid_id(1), shape, & mask(shape(1,1):shape(2,1), & shape(1,2):shape(2,2), & shape(1,3):shape(2,3)), & new_mask, ierror ) IF ( ierror /= PRISM_Success ) & CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_mask umask_id') mask = (vmask == 1) CALL prism_set_mask (vmask_id(1), grid_id(1), shape, & mask(shape(1,1):shape(2,1), & shape(1,2):shape(2,2), & shape(1,3):shape(2,3)), & new_mask, ierror ) IF ( ierror /= PRISM_Success ) & CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_mask umask_id') mask = (tmask == 1) CALL prism_set_mask (tmask_id(1), grid_id(1), shape, & mask(shape(1,1):shape(2,1), & shape(1,2):shape(2,2), & shape(1,3):shape(2,3)), & new_mask, ierror ) IF ( ierror /= PRISM_Success ) & CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_mask umask_id') mask = (fmask == 1) CALL prism_set_mask (fmask_id(1), grid_id(1), shape, & mask(shape(1,1):shape(2,1), & shape(1,2):shape(2,2), & shape(1,3):shape(2,3)), & new_mask, ierror ) IF ( ierror /= PRISM_Success ) & CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_set_mask umask_id') IF(lwp) WRITE(numout,*) "DONE ALL THE SET MASKS" ! ----------------------------------------------------------------- ! ... Define the angles ! This is needed if zonal tau is not oriented E-W and meridional ! tau is not oriented along N-S but rather along local coordinate ! axis. Please check!!!! ! ----------------------------------------------------------------- !rr cal prism_set_angles ( ..., ierror ) ! not yet supported by OASIS4 ! ----------------------------------------------------------------- ! ... Define the partition ! ----------------------------------------------------------------- IF ( rootexchg ) THEN range(1) = nimpp-1+nldi ! global start in i range(2) = nlei-nldi+1 ! local size in i of valid region range(3) = njmpp-1+nldj ! global start in j range(4) = nlej-nldj+1 ! local size in j of valid region range(5) = range(2) & * range(4) ! local horizontal size ! ! Collect ranges from all NEMO procs on the local root process ! 
            CALL mpi_gather(range,  5, MPI_INTEGER, &
                            ranges, 5, MPI_INTEGER, localRoot, localComm, ierror)

            IF ( localRank == localRoot ) THEN

               maxlen = maxval(ranges(5,:))

               ALLOCATE(buffer(1:maxlen), stat = ierror)
               IF (ierror > 0) THEN
                  CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in allocating buffer')
                  RETURN
               ENDIF

            ENDIF

         ENDIF

         ! -----------------------------------------------------------------
         ! ... Define the scalefactors
         ! -----------------------------------------------------------------

         !rr WRITE(numout,*) "CALLING SCALEFACTOR"
         !rr call prism_set_scalefactor ( grid_id(1), shape, e1t, e2t, e3t, ierror )  ! not yet supported by OASIS4
         !rr WRITE(numout,*) "ABOUT TO DEFINE THE TRANSIENTS"

         !------------------------------------------------------------------
         ! 3rd Declare the transient variables
         !------------------------------------------------------------------
         !
         ! ... Define symbolic names for the transient fields sent by the ocean.
         !     These must be identical to the names specified in the SMIOC file.
         !
         cpl_send( 1)='SOSSTSST' ! sea surface temperature             -> sst_io
         cpl_send( 2)='SITOCEAN' ! sea ice thickness                   -> hicif (only 1 layer available!)
#if defined key_cpl_albedo
         cpl_send( 3)='STIOCEAN' ! surface temperature over sea ice    -> tn_ice
         cpl_send( 4)='SAIOCEAN' ! albedo over sea ice                 -> alb_ice
#else
         cpl_send( 3)='SITOCEAN' ! sea ice thickness                   -> hicif (only 1 layer available!)
         cpl_send( 4)='SNTOCEAN' ! surface snow thickness over sea ice -> hsnif
#endif
#if defined key_cpl_ocevel
         cpl_send( 5)='SUNOCEAN' ! U-velocity                          -> un
         cpl_send( 6)='SVNOCEAN' ! V-velocity                          -> vn
#endif
         !
         ! ... Define symbolic names for the transient fields received by the ocean.
         !     These must be identical to the names specified in the SMIOC file.
         !
         ! ... a) U-Grid fields
         !
         cpl_recv( 1)='TXWOCEWU' ! weighted surface downward eastward stress
         cpl_recv( 2)='TYWOCEWU' ! weighted surface downward northward stress
         cpl_recv( 3)='TXIOCEWU' ! weighted surface downward eastward stress over ice
         cpl_recv( 4)='TYIOCEWU' ! weighted surface downward northward stress over ice
         !
         ! ... b) V-Grid fields
         !
         cpl_recv( 5)='TXWOCEWV' ! weighted surface downward eastward stress
         cpl_recv( 6)='TYWOCEWV' ! weighted surface downward northward stress
         cpl_recv( 7)='TXIOCEWV' ! weighted surface downward eastward stress over ice
         cpl_recv( 8)='TYIOCEWV' ! weighted surface downward northward stress over ice
         !
         ! ... c) T-Grid fields
         !
         cpl_recv( 9)='FRWOCEPE' ! P-E over water                               -> zpew
         cpl_recv(10)='FRIOCEPE' ! P-E over ice                                 -> zpei
         cpl_recv(11)='FRROCESN' ! surface downward snow fall                   -> zpsol
         cpl_recv(12)='FRIOCEEV' ! surface upward snow flux where sea ice       -> zevice
         cpl_recv(13)='QSWOCESR' ! surface net downward shortwave flux          -> qsr_oce
         cpl_recv(14)='QSWOCENS' ! surface downward non-solar heat flux in air  -> qnsr_oce
         cpl_recv(15)='QSIOCESR' ! solar heat flux on sea ice                   -> qsr_ice
         cpl_recv(16)='QSIOCENS' ! non-solar heat flux on sea ice               -> qnsr_ice
         cpl_recv(17)='QSIOCEDQ' ! non-solar heat flux derivative               -> dqns_ice
#if defined key_cpl_discharge
         cpl_recv(18)='FRWOCEIS' ! ice discharge into ocean                     -> calving
         cpl_recv(19)='FRWOCERD' ! river discharge into ocean                   -> zrunriv
         cpl_recv(20)='FRWOCECD' ! continental discharge into ocean             -> zruncot
#endif

         IF ( wp == 4 ) data_type = PRISM_REAL
         IF ( wp == 8 ) data_type = PRISM_DOUBLE_PRECISION

         nodim(1) = 3 ! check
         nodim(2) = 0
         !
         ! ... Announce send variables, all on T points.
         !
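         ! Each transient is paired with the point set and mask of its grid
         ! staggering in the definition loops below: the sends and the T-grid
         ! receives (9 to nrecv) use tpoint_id/tmask_id, receives 1-4 use
         ! upoint_id/umask_id, and receives 5-8 use vpoint_id/vmask_id.
         !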
         DO ji = 1, nsend
            IF ( ji == 2 ) THEN
               nodim(2) = 2
            ELSE
               nodim(2) = 0
            ENDIF
            CALL prism_def_var (send_id(ji), cpl_send(ji), grid_id(1), &
                 tpoint_id(1), tmask_id(1), nodim, shape, data_type, ierror)
            IF ( ierror /= PRISM_Success ) THEN
               PRINT *, 'Failed to define transient ', ji, TRIM(cpl_send(ji))
               CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_def_var')
            ENDIF
         ENDDO
         !
         nodim(1) = 3 ! check
         nodim(2) = 0
         !
         ! ... Announce recv variables.
         !
         ! ... a) on U points
         !
         DO ji = 1, 4
            CALL prism_def_var (recv_id(ji), cpl_recv(ji), grid_id(1), &
                 upoint_id(1), umask_id(1), nodim, shape, data_type, ierror)
            IF ( ierror /= PRISM_Success ) THEN
               PRINT *, 'Failed to define transient ', ji, TRIM(cpl_recv(ji))
               CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_def_var')
            ENDIF
         ENDDO
         !
         ! ... b) on V points
         !
         DO ji = 5, 8
            CALL prism_def_var (recv_id(ji), cpl_recv(ji), grid_id(1), &
                 vpoint_id(1), vmask_id(1), nodim, shape, data_type, ierror)
            IF ( ierror /= PRISM_Success ) THEN
               PRINT *, 'Failed to define transient ', TRIM(cpl_recv(ji))
               CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_def_var')
            ENDIF
         ENDDO
         !
         ! ... c) on T points
         !
         DO ji = 9, nrecv
            CALL prism_def_var (recv_id(ji), cpl_recv(ji), grid_id(1), &
                 tpoint_id(1), tmask_id(1), nodim, shape, data_type, ierror)
            IF ( ierror /= PRISM_Success ) THEN
               PRINT *, 'Failed to define transient ', TRIM(cpl_recv(ji))
               CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_def_var')
            ENDIF
         ENDDO

      ENDIF ! commRank

      !------------------------------------------------------------------
      ! 4th End of definition phase
      !------------------------------------------------------------------

      IF(lwp) WRITE(numout,*) "ABOUT TO CALL PRISM_ENDDEF"

      CALL prism_enddef(ierror)

      IF(lwp) WRITE(numout,*) "DONE ENDDEF", ierror

      IF ( ierror /= PRISM_Success ) &
         CALL prism_abort ( comp_id, 'OPA9.0', 'cpl_prism_define: Failure in prism_enddef')

      IF(lwp) WRITE(numout,*) "ALL DONE, EXITING PRISM SET UP PHASE"

   END SUBROUTINE cpl_prism_define


   SUBROUTINE cpl_prism_send( var_id, date, data_array, info )

      IMPLICIT NONE

      !!---------------------------------------------------------------------
      !!              ***  ROUTINE cpl_prism_send  ***
      !!
      !! ** Purpose : - At each coupling time step, this routine sends fields
      !!      like sst or ice cover to the coupler or remote application.
      !!
      !! ** Method  :   OASIS4
      !!----------------------------------------------------------------------
      !! * Arguments
      !!
      INTEGER, INTENT( IN )  :: var_id           ! variable Id
      INTEGER, INTENT( OUT ) :: info             ! OASIS4 info argument
      INTEGER, INTENT( IN )  :: date             ! ocean time-step in seconds
      REAL(wp)               :: data_array(:,:)
      !!
      !! * Local declarations
      !!
#if defined key_mpp_mpi
      REAL(wp)               :: global_array(jpiglo,jpjglo)
      !
!mpi  INTEGER                :: status(MPI_STATUS_SIZE)
!mpi  INTEGER                :: type             ! MPI data type
      INTEGER                :: request          ! MPI isend request
      INTEGER                :: ji, jj, jn       ! local loop indices
#else
      INTEGER                :: ji
#endif
      !!
      INTEGER, SAVE          :: ncount = 0
      !!
      !!--------------------------------------------------------------------
      !!
      ncount = ncount + 1

#if defined key_mpp_mpi

      request = 0

      IF ( rootexchg ) THEN
         !
!mpi     IF ( wp == 4 ) type = MPI_REAL
!mpi     IF ( wp == 8 ) type = MPI_DOUBLE_PRECISION
         !
         ! collect data on the local root process
         !
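         ! Pattern of the root exchange below: every non-root rank packs its
         ! halo-free subdomain into exfld and sends it to localRoot; the root
         ! copies its own subdomain directly, unpacks each remote message from
         ! buffer into global_array using the ranges gathered in
         ! cpl_prism_define, and issues a single prism_put of the global field.
         !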
         IF ( localRank /= localRoot ) THEN

            DO jj = nldj, nlej
               DO ji = nldi, nlei
                  exfld(ji-nldi+1,jj-nldj+1) = data_array(ji,jj)
               ENDDO
            ENDDO

!mpi        CALL mpi_send(exfld, range(5), type, localRoot, localRank, localComm, ierror)
            CALL mppsend (localRank, exfld, range(5), localRoot, request)
         ENDIF

         IF ( localRank == localRoot ) THEN

            DO jj = ranges(3,localRoot), ranges(3,localRoot)+ranges(4,localRoot)-1
               DO ji = ranges(1,localRoot), ranges(1,localRoot)+ranges(2,localRoot)-1
                  global_array(ji,jj) = data_array(ji,jj) ! workaround
               ENDDO
            ENDDO

            DO jn = 1, localSize-1

!mpi           CALL mpi_recv(buffer, ranges(5,jn), type, localRoot, jn, localComm, status, ierror)
               CALL mpprecv(jn, buffer, ranges(5,jn))

               DO jj = ranges(3,jn), ranges(3,jn)+ranges(4,jn)-1
                  DO ji = ranges(1,jn), ranges(1,jn)+ranges(2,jn)-1
                     global_array(ji,jj) = buffer( (jj-ranges(3,jn))*ranges(2,jn) + ji-ranges(1,jn)+1 )
                  ENDDO
               ENDDO

            ENDDO

         ENDIF
         !
         ! send data from local root to OASIS4
         !
         CALL prism_put ( var_id, dates, dates_bound, global_array, info, ierror )

      ELSE
         !
         ! send local data from every process to OASIS4
         !
         CALL prism_put ( var_id, dates, dates_bound, data_array, info, ierror )

      ENDIF !rootexchg

#else
      !
      ! send local data from every process to OASIS4
      !
      IF ( commRank ) &
         CALL prism_put ( var_id, dates, dates_bound, data_array, info, ierror )
#endif

      IF ( commRank ) THEN

         IF (l_ctl) THEN

            IF ( info==PRISM_Cpl ) THEN
               WRITE(numout,*) '****************'
               DO ji = 1, nsend
                  IF (var_id == send_id(ji) ) THEN
                     WRITE(numout,*) 'prism_put: Outgoing ', cpl_send(ji)
                     EXIT
                  ENDIF
               ENDDO
               WRITE(numout,*) 'prism_put: var_id ', var_id
               WRITE(numout,*) 'prism_put:   date ', date
               WRITE(numout,*) 'prism_put:   info ', info
               WRITE(numout,*) '     - Minimum value is ', MINVAL(data_array)
               WRITE(numout,*) '     - Maximum value is ', MAXVAL(data_array)
               WRITE(numout,*) '     -     Sum value is ', SUM(data_array)
               WRITE(numout,*) '****************'
            ENDIF

         ENDIF

         IF ( ncount == nsend ) THEN
            !
            ! Update dates and dates_bound for the next step. We assume that
            ! cpl_prism_send is called for all send fields at each time step.
            ! Therefore we update the date argument to prism_put only every
            ! nsend calls to cpl_prism_send.
            !
            dates_bound(1) = dates_bound(2)

            tmpdate   = dates_bound(2)
            date_incr = rdttra(1)/2.0

            CALL PRISM_calc_newdate ( tmpdate, date_incr, ierror )
            dates = tmpdate
            CALL PRISM_calc_newdate ( tmpdate, date_incr, ierror )
            dates_bound(2) = tmpdate

            ncount = 0

         ENDIF

      ENDIF ! commRank

   END SUBROUTINE cpl_prism_send


   SUBROUTINE cpl_prism_recv( var_id, date, data_array, info )

      IMPLICIT NONE

      !!---------------------------------------------------------------------
      !!              ***  ROUTINE cpl_prism_recv  ***
      !!
      !! ** Purpose : - At each coupling time step, this routine receives
      !!      fields like stresses and fluxes from the coupler or remote
      !!      application.
      !!
      !! ** Method  :   OASIS4
      !!----------------------------------------------------------------------
      !! * Arguments
      !!
      INTEGER, INTENT( IN )   :: var_id          ! variable Id
      INTEGER, INTENT( OUT )  :: info            ! OASIS4 info argument
      INTEGER, INTENT( IN )   :: date            ! ocean time-step in seconds
      REAL(wp), INTENT( OUT ) :: data_array(:,:)
      !!
      !! * Local declarations
      !!
#if defined key_mpp_mpi
      REAL(wp)               :: global_array(jpiglo,jpjglo)
      !
      LOGICAL                :: action = .false.
!mpi  INTEGER                :: status(MPI_STATUS_SIZE)
!mpi  INTEGER                :: type             ! MPI data type
      INTEGER                :: request          ! MPI isend request
      INTEGER                :: ji, jj, jn       ! local loop indices
#else
      INTEGER                :: ji
#endif
      INTEGER, SAVE          :: ncount = 0
      !!
      !!--------------------------------------------------------------------
      !!
      ncount = ncount + 1

#if defined key_mpp_mpi

      request = 0

      IF ( rootexchg ) THEN
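         !
         ! Mirror image of the gather in cpl_prism_send: the root receives the
         ! global field once, repacks each remote subdomain row by row into
         ! buffer with the linear index
         !    (jj - ranges(3,jn)) * ranges(2,jn) + ji - ranges(1,jn) + 1
         ! (row-major within the subdomain, 1-based) and sends it on, while
         ! every other rank receives its own strip into exfld.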
         !
         ! receive data from OASIS4 on local root
         !
         IF ( commRank ) &
            CALL prism_get (var_id, dater, dater_bound, global_array, info, ierror)
         CALL MPI_BCAST ( info, 1, MPI_INTEGER, localRoot, localComm, ierror )

      ELSE
         !
         ! receive local data from OASIS4 on every process
         !
         CALL prism_get (var_id, dater, dater_bound, exfld, info, ierror)

      ENDIF

      action = (info==PRISM_CplIO)

      IF ( rootexchg .and. action ) THEN
         !
!mpi     IF ( wp == 4 ) type = MPI_REAL
!mpi     IF ( wp == 8 ) type = MPI_DOUBLE_PRECISION
         !
         ! distribute data to processes
         !
         IF ( localRank == localRoot ) THEN

            DO jj = ranges(3,localRoot), ranges(3,localRoot)+ranges(4,localRoot)-1
               DO ji = ranges(1,localRoot), ranges(1,localRoot)+ranges(2,localRoot)-1
                  ! shift to the local, 1-based indices of the valid region
                  exfld(ji-ranges(1,localRoot)+1,jj-ranges(3,localRoot)+1) = global_array(ji,jj)
               ENDDO
            ENDDO

            DO jn = 1, localSize-1

               DO jj = ranges(3,jn), ranges(3,jn)+ranges(4,jn)-1
                  DO ji = ranges(1,jn), ranges(1,jn)+ranges(2,jn)-1
                     buffer( (jj-ranges(3,jn))*ranges(2,jn) + ji-ranges(1,jn)+1 ) = global_array(ji,jj)
                  ENDDO
               ENDDO

!mpi           CALL mpi_send(buffer, ranges(5,jn), type, jn, jn, localComm, ierror)
               CALL mppsend (jn, buffer, ranges(5,jn), jn, request)

            ENDDO

         ENDIF

         IF ( localRank /= localRoot ) &
!mpi        CALL mpi_recv(exfld, range(5), type, localRoot, localRank, localComm, status, ierror)
            CALL mpprecv(localRank, exfld, range(5))

      ENDIF

      IF ( action ) THEN

         data_array = 0.0

         DO jj = nldj, nlej
            DO ji = nldi, nlei
               data_array(ji,jj) = exfld(ji-nldi+1,jj-nldj+1)
            ENDDO
         ENDDO

         IF (l_ctl) THEN
            WRITE(numout,*) '****************'
            DO ji = 1, nrecv
               IF (var_id == recv_id(ji) ) THEN
                  WRITE(numout,*) 'prism_get: Incoming ', cpl_recv(ji)
                  EXIT
               ENDIF
            ENDDO
            WRITE(numout,*) 'prism_get: var_id ', var_id
            WRITE(numout,*) 'prism_get:   date ', date
            WRITE(numout,*) 'prism_get:   info ', info
            WRITE(numout,*) '     - Minimum value is ', MINVAL(data_array)
            WRITE(numout,*) '     - Maximum value is ', MAXVAL(data_array)
            WRITE(numout,*) '     -     Sum value is ', SUM(data_array)
            WRITE(numout,*) '****************'
         ENDIF

      ENDIF

#else

      CALL prism_get (var_id, dater, dater_bound, exfld, info, ierror)

      IF ( info==PRISM_CplIO ) THEN
         data_array = exfld

         IF (l_ctl) THEN
            WRITE(numout,*) '****************'
            DO ji = 1, nrecv
               IF (var_id == recv_id(ji) ) THEN
                  WRITE(numout,*) 'prism_get: Incoming ', cpl_recv(ji)
                  EXIT
               ENDIF
            ENDDO
            WRITE(numout,*) 'prism_get: var_id ', var_id
            WRITE(numout,*) 'prism_get:   date ', date
            WRITE(numout,*) 'prism_get:   info ', info
            WRITE(numout,*) '     - Minimum value is ', MINVAL(data_array)
            WRITE(numout,*) '     - Maximum value is ', MAXVAL(data_array)
            WRITE(numout,*) '     -     Sum value is ', SUM(data_array)
            WRITE(numout,*) '****************'
         ENDIF

      ENDIF

#endif

      IF ( ncount == nrecv ) THEN
         !
         ! Update dater and dater_bound for the next step. We assume that
         ! cpl_prism_recv is called for all recv fields at each time step.
         ! Therefore we update the date argument to prism_get only every
         ! nrecv calls to cpl_prism_recv.
         !
         dater_bound(1) = dater_bound(2)

         tmpdate   = dater_bound(2)
         date_incr = rdttra(1)/2.0

         CALL PRISM_calc_newdate ( tmpdate, date_incr, ierror )
         dater = tmpdate
         CALL PRISM_calc_newdate ( tmpdate, date_incr, ierror )
         dater_bound(2) = tmpdate

         ncount = 0

      ENDIF

   END SUBROUTINE cpl_prism_recv
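
   ! Worked example of the ncount bookkeeping above (illustrative): with the
   ! default nrecv = 17, the 17th call to cpl_prism_recv within a time step
   ! resets ncount and advances dater by one full rdttra(1), applied as two
   ! half-step increments so that dater stays centred between the new
   ! dater_bound(1) and dater_bound(2).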

   SUBROUTINE cpl_prism_finalize

      IMPLICIT NONE

      !!---------------------------------------------------------------------
      !!              ***  ROUTINE cpl_prism_finalize  ***
      !!
      !! ** Purpose : - Finalizes the coupling. If MPI_Init was not called
      !!      explicitly before cpl_prism_init, this will also close the MPI
      !!      communication.
      !!
      !! ** Method  :   OASIS4
      !!----------------------------------------------------------------------

      DEALLOCATE(exfld)

      IF ( prism_was_initialized ) THEN

         CALL prism_terminated ( prism_was_terminated, ierror )

         IF ( prism_was_terminated ) THEN
            PRINT *, 'prism has already been terminated.'
         ELSE
            CALL prism_terminate ( ierror )
            prism_was_terminated = .true.
         ENDIF

      ELSE
         PRINT *, 'Initialize prism before terminating it.'
      ENDIF

   END SUBROUTINE cpl_prism_finalize

#else

   !!----------------------------------------------------------------------
   !!   Default case            Dummy module        forced Ocean/Atmosphere
   !!----------------------------------------------------------------------

CONTAINS

   SUBROUTINE cpl_prism_init             ! Dummy routine
   END SUBROUTINE cpl_prism_init

   SUBROUTINE cpl_prism_define           ! Dummy routine
   END SUBROUTINE cpl_prism_define

   SUBROUTINE cpl_prism_send             ! Dummy routine
   END SUBROUTINE cpl_prism_send

   SUBROUTINE cpl_prism_recv             ! Dummy routine
   END SUBROUTINE cpl_prism_recv

   SUBROUTINE cpl_prism_finalize         ! Dummy routine
   END SUBROUTINE cpl_prism_finalize

#endif

END MODULE cpl_oasis4