source: CPL/oasis3-mct/branches/OASIS3-MCT_2.0_branch/lib/mct/examples/climate_concur1/coupler.F90 @ 4775

Last change on this file since 4775 was 4775, checked in by aclsce, 4 years ago
  • Imported oasis3-mct from Cerfacs svn server (not supported anymore).

The version has been extracted from https://oasis3mct.cerfacs.fr/svn/branches/OASIS3-MCT_2.0_branch/oasis3-mct@1818

File size: 10.7 KB
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
!    Math and Computer Science Division, Argonne National Laboratory   !
!-----------------------------------------------------------------------
! CVS coupler.F90,v 1.8 2004-04-23 20:57:10 jacob Exp
! CVS MCT_2_8_0
!BOP -------------------------------------------------------------------
!
! !ROUTINE: coupler -- coupler for unit tester
!
! !DESCRIPTION:
! A coupler subroutine to test the functionality of MCT.
!
! !INTERFACE:
!
      subroutine coupler (comm,ncomps,compid)
!
! !USES:
!
! Get the things needed from MCT by "use, only" with renaming:
!
! ---------- first group is identical to what model.F90 uses ----
!
!---Component Model Registry
      use m_MCTWorld,only: MCTWorld_init => init
      use m_MCTWorld,only: MCTWorld_clean => clean
!---Domain Decomposition Descriptor DataType and associated methods
      use m_GlobalSegMap,only: GlobalSegMap
      use m_GlobalSegMap,only: GlobalSegMap_init => init
      use m_GlobalSegMap,only: GlobalSegMap_lsize => lsize
      use m_GlobalSegMap,only: GlobalSegMap_clean => clean
      use m_GlobalSegMap,only: GlobalSegMap_Ordpnts => OrderedPoints
!---Field Storage DataType and associated methods
      use m_AttrVect,only    : AttrVect
      use m_AttrVect,only    : AttrVect_init => init
      use m_AttrVect,only    : AttrVect_clean => clean
      use m_AttrVect,only    : AttrVect_importRAttr => importRAttr
!---Intercomponent communications scheduler
      use m_Router,only: Router
      use m_Router,only: Router_init => init
      use m_Router,only: Router_clean => clean
!---Intercomponent transfer
      use m_Transfer,only : MCT_Send => send
      use m_Transfer,only : MCT_Recv => recv

! ---------- because the coupler will do the interpolation ----------
!                it needs more methods
!
!---Sparse Matrix DataType and associated methods
      use m_SparseMatrix, only : SparseMatrix
      use m_SparseMatrix, only : SparseMatrix_init => init
      use m_SparseMatrix, only : SparseMatrix_importGRowInd => &
                                              importGlobalRowIndices
      use m_SparseMatrix, only : SparseMatrix_importGColInd => &
                                              importGlobalColumnIndices
      use m_SparseMatrix, only : SparseMatrix_importMatrixElts => &
                                              importMatrixElements
      use m_SparseMatrixPlus, only : SparseMatrixPlus
      use m_SparseMatrixPlus, only : SparseMatrixPlus_init => init
      use m_SparseMatrixPlus, only : SparseMatrixPlus_clean => clean
      use m_SparseMatrixPlus, only : Xonly ! Decompose matrix by row
!---Matrix-Vector multiply methods
      use m_MatAttrVectMul, only: MCT_MatVecMul => sMatAvMult

!---MPEU I/O utilities
      use m_stdio
      use m_ioutil

      implicit none

      include "mpif.h"

! !INPUT PARAMETERS:

      integer,intent(in) :: comm
      integer,intent(in) :: ncomps
      integer,intent(in) :: compid
!
!EOP ___________________________________________________________________

!     Local variables

      character(len=*), parameter :: cplname='coupler.F90'

      integer :: nxa   !  number of points in x-direction, atmos
      integer :: nya   !  number of points in y-direction, atmos
      integer :: nxo   !  number of points in x-direction, ocean
      integer :: nyo   !  number of points in y-direction, ocean

      character(len=100),parameter :: &
        RemapMatrixFile='../../data/t42_to_popx1_c_mat.asc'

! Loop indices
      integer :: i,j,k,n

      logical :: match

! MPI variables
      integer :: rank, nprocs, root, ierr
! MCTWorld variables
      integer :: AtmID
! Grid variables
      integer :: localsize
! GlobalSegMap variables
      type(GlobalSegMap)   :: AtmGSMap, OcnGSMap
      integer,dimension(1) :: start,length
      integer, dimension(:), pointer :: points
      integer :: latsize, lonsize
      integer :: rowindex, colindex, boxvertex
! AttrVect variables
      type(AttrVect) :: AtmAV, OcnAV
      integer :: aavsize,oavsize
! Router variables
      type(Router) :: Rout
! SparseMatrix variables
      integer :: mdev
      integer :: num_elements, nRows, nColumns
      integer, dimension(2) :: src_dims, dst_dims
      integer, dimension(:), pointer :: rows, columns
      real, dimension(:), pointer :: weights
! A2O SparseMatrix elements on root
      type(SparseMatrix) :: sMat
! A2O distributed SparseMatrixPlus variables
      type(SparseMatrixPlus) :: A2OMatPlus
! _____________________________________________________________________

!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!   INITIALIZATION PHASE
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!


      ! LOCAL RANK AND SIZE
      call MPI_COMM_RANK(comm,rank,ierr)
      call MPI_COMM_SIZE(comm,nprocs,ierr)
      root = 0

      if(rank==0) write(6,*) cplname,' MyID ', compid
      if(rank==0) write(6,*) cplname,' Num procs ', nprocs

      ! Initialize MCTWorld
      call MCTWorld_init(ncomps,MPI_COMM_WORLD,comm,compid)
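
      ! MCTWorld_init registers this component in MCT's global registry:
      ! ncomps is the total number of components in the coupled system,
      ! MPI_COMM_WORLD is the communicator spanning all of them, comm is
      ! this component's own communicator, and compid is its unique id.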

      ! Set the atm component id.  Must be known to this
      ! component.  (MCT doesn't handle that.)
      AtmID=1

      ! Set grid dimensions for the atmosphere and ocean grids.
      ! MCT could be used for this (by defining a GeneralGrid in
      ! each component and sending them to the coupler), but for this
      ! simple example we'll assume they're known to the coupler.
      nxa = 128
      nya = 64

      nxo = 320
      nyo = 384

      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
      ! Read matrix weights for interpolation from a file.
      if (rank == root) then
         mdev = luavail()
         open(mdev, file=trim(RemapMatrixFile), status="old")
         read(mdev,*) num_elements
         read(mdev,*) src_dims(1), src_dims(2)
         read(mdev,*) dst_dims(1), dst_dims(2)

         allocate(rows(num_elements), columns(num_elements), &
              weights(num_elements), stat=ierr)

         do n=1, num_elements
            read(mdev,*) rows(n), columns(n), weights(n)
         end do

         close(mdev)

         ! Initialize a SparseMatrix
         nRows = dst_dims(1) * dst_dims(2)
         nColumns = src_dims(1) * src_dims(2)
         call SparseMatrix_init(sMat,nRows,nColumns,num_elements)
         call SparseMatrix_importGRowInd(sMat, rows, size(rows))
         call SparseMatrix_importGColInd(sMat, columns, size(columns))
         call SparseMatrix_importMatrixElts(sMat, weights, size(weights))

         deallocate(rows, columns, weights, stat=ierr)

      endif
      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
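
      ! Based on the reads above, the remap file is assumed to be plain
      ! ASCII laid out as follows (values shown are illustrative only):
      !
      !    <num_elements>                   number of nonzero weights
      !    <src_dims(1)> <src_dims(2)>      e.g. 128 64  (T42 atmosphere)
      !    <dst_dims(1)> <dst_dims(2)>      e.g. 320 384 (POPx1 ocean)
      !    <row> <column> <weight>          repeated num_elements times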

      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
      ! Initialize a Global Segment Map for the Ocean

      ! Set up a 1-d decomposition.
      ! There is just 1 segment per processor
      localsize = nxo*nyo / nprocs

      ! we'll use the distributed init of GSMap so
      ! initialize start and length arrays for this processor
      start(1) = (rank*localsize) + 1
      length(1) = localsize

      ! initialize the GSMap
      call GlobalSegMap_init(OcnGSMap,start,length,root,comm,compid)
      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
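
      ! A concrete check of the arithmetic above, assuming (for
      ! illustration only) nprocs = 8: localsize = 320*384/8 = 15360, so
      ! rank 2 owns the single segment of global ocean points 30721..46080.
      ! Note the simple division assumes nprocs evenly divides nxo*nyo.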


      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
      ! Initialize a Global Segment Map for the Atmosphere

      ! Set up a 1-d decomposition.
      ! There is just 1 segment per processor
      localsize = nxa*nya / nprocs

      ! we'll use the distributed init of GSMap so
      ! initialize start and length arrays for this processor
      start(1) = (rank*localsize) + 1
      length(1) = localsize

      ! initialize the GSMap
      call GlobalSegMap_init(AtmGSMap,start,length,root,comm,compid)
      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

      ! Use a GSMap function:
      ! return the points local to this processor
      ! in their assumed order.
      call GlobalSegMap_Ordpnts(AtmGSMap,rank,points)

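      ! With the contiguous one-segment-per-rank layout used here, points(i)
      ! is simply rank*localsize + i.  For example (again assuming, for
      ! illustration, nprocs = 8): localsize = 128*64/8 = 1024, so rank 2
      ! holds atmosphere points 2049..3072.  The run phase below checks the
      ! received data against these global indices.
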

      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
      ! Build a SparseMatrixPlus for doing the interpolation.
      ! Specify the matrix decomposition to be by row,
      ! following the atmosphere's decomposition.
      call SparseMatrixPlus_init(A2OMatPlus, sMat, AtmGSMap, OcnGSMap, &
           Xonly, root, comm, compid)
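
      ! A sketch of the semantics: SparseMatrixPlus_init scatters the
      ! root-held sMat across comm and precomputes the data-movement
      ! schedule, so the sMatAvMult call in the run phase can apply
      ! y = M*x in parallel, with x decomposed by AtmGSMap and y by
      ! OcnGSMap.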

      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
      ! Initialize an AttrVect on the atmosphere grid
      aavsize = GlobalSegMap_lsize(AtmGSMap,comm)
      if(rank==0) write(6,*) cplname, ' localsize: Atm ', aavsize
      call AttrVect_init(AtmAV,rList="field1:field2",lsize=aavsize)


      ! Initialize an AttrVect on the ocean grid
      oavsize = GlobalSegMap_lsize(OcnGSMap,comm)
      if(rank==0) write(6,*) cplname, ' localsize: Ocn ', oavsize
      call AttrVect_init(OcnAV,rList="field1:field2",lsize=oavsize)
      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
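
      ! Each AttrVect now holds two real attributes, "field1" and "field2",
      ! stored as rows of rAttr: AtmAV%rAttr(2,i) is "field2" at local point
      ! i (the run phase below relies on this indexing).  A whole field can
      ! also be loaded in one call with the AttrVect_importRAttr method
      ! imported above, e.g. (mydata being a hypothetical real array of
      ! length aavsize):
      !
      !    call AttrVect_importRAttr(AtmAV,"field1",mydata,aavsize)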

      !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
      ! Initialize a Router
      call Router_init(AtmID,AtmGSMap,comm,Rout)
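
      ! The Router holds the communication schedule between this coupler's
      ! AtmGSMap decomposition and component AtmID's own decomposition of
      ! the same grid; MCT_Send/MCT_Recv calls on the two sides use it to
      ! move attribute data directly between processes.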

!!! END OF INIT !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!


!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!   RUN PHASE
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

      do j=1,10   ! "timestep" loop


        ! coupler calculations here

        match=.TRUE.

        ! Receive the data
        call MCT_Recv(AtmAV,Rout)
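
        ! MCT_Recv blocks until the atmosphere model posts the matching
        ! MCT_Send on its side of the Router, then unpacks the transferred
        ! values into AtmAV's local points.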

        ! The 2nd attribute has the values of each gridpoint in
        ! the index numbering scheme.  Check the received values
        ! against the points on this processor.  They should
        ! match exactly.
        do i=1,aavsize
           if( int(AtmAV%rAttr(2,i)) .ne. points(i)) then
             write(6,*) cplname,rank, " Data doesn't match ",i
             match=.FALSE.
           endif
        enddo
        if(match .and. j==10) &
         write(6,*) cplname," Last step, All points match on ",rank

        if(rank==0) write(6,*) cplname, " Received data step ",j

        ! Interpolate by doing a parallel SparseMatrix-AttrVect multiply.
        ! Note: it doesn't make much sense to interpolate "field2", which
        ! holds the grid point indices, but MatVecMul will interpolate all
        ! real attributes.
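        ! Concretely, for each real attribute k the multiply accumulates,
        ! over every stored matrix element n (global row r, column c,
        ! weight w):
        !    OcnAV%rAttr(k,r) = OcnAV%rAttr(k,r) + w * AtmAV%rAttr(k,c)
        ! with r and c mapped to each process's local numbering by the
        ! rearrangement set up in A2OMatPlus.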
        call MCT_MatVecMul(AtmAV, A2OMatPlus, OcnAV)
        if(rank==0) write(6,*) cplname," Data transformed step ",j


        ! pass interpolated data on to the ocean model and/or
        ! do more calculations

      enddo


!!! END OF RUN  !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!


!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!   FINALIZE PHASE
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

      ! deallocate memory
      call Router_clean(Rout)
      call AttrVect_clean(AtmAV)
      call AttrVect_clean(OcnAV)
      call GlobalSegMap_clean(AtmGSMap)
      call GlobalSegMap_clean(OcnGSMap)
      call MCTWorld_clean()
      if(rank==0) write(6,*) cplname, " done"

    end subroutine coupler