source: CPL/oasis3-mct/branches/OASIS3-MCT_2.0_branch/lib/mct/examples/climate_concur1/master.F90 @ 4775

Last change on this file since 4775 was 4775, checked in by aclsce, 5 years ago
  • Imported oasis3-mct from the Cerfacs svn server (no longer supported).

This version was extracted from https://oasis3mct.cerfacs.fr/svn/branches/OASIS3-MCT_2.0_branch/oasis3-mct@1818

File size: 2.4 KB

!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
!    Math and Computer Science Division, Argonne National Laboratory   !
!-----------------------------------------------------------------------
! CVS master.F90,v 1.7 2004-04-23 05:43:11 jacob Exp
! CVS MCT_2_8_0
!BOP -------------------------------------------------------------------
!
! !ROUTINE: master  -- driver for simple concurrent coupled model
!
! !DESCRIPTION:  Provide a simple example of using MCT to connect two
!  components executing concurrently in a single executable.
!
! !INTERFACE:
!
      program master
!
! !USES:
!

      implicit none

      include "mpif.h"

!
!EOP ___________________________________________________________________

!     local variables

      character(len=*), parameter :: mastername='master.F90'

      integer, parameter :: ncomps = 2   ! Must know total number of
                                         ! components in coupled system

      integer, parameter :: AtmID = 1    ! pick an id for the atmosphere
      integer, parameter :: CplID = 2    ! pick an id for the coupler
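      ! (component ids are assumed here to be unique and to run from 1
      !  to ncomps, which is what MCT's world registration expects)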

! MPI variables
      integer :: splitcomm, rank, nprocs, compid, myID, ierr, color
      integer :: anprocs, cnprocs
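      ! (compid, myID, anprocs and cnprocs are declared but never used
      !  in this driver)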

!-----------------------------------------------------------------------
! The Main program.
! We are implementing a single-executable, concurrent-execution system.
!
! This small main program carves up MPI_COMM_WORLD and then starts
! each component on its own processor set.

      ! Initialize MPI
      call MPI_INIT(ierr)

      ! Get basic MPI information
      call MPI_COMM_SIZE(MPI_COMM_WORLD,nprocs,ierr)
      call MPI_COMM_RANK(MPI_COMM_WORLD,rank,ierr)

      ! Create MPI communicators for each component
      !
      ! each component will run on half the processors
      !
      ! set color
      if (rank .lt. nprocs/2) then
        color = 0
      else
        color = 1
      endif
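      ! (integer division: this assumes at least two processes; with an
      !  odd nprocs the coupler side gets the extra process)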

      ! Split MPI_COMM_WORLD into communicators for each component.
      call MPI_COMM_SPLIT(MPI_COMM_WORLD,color,0,splitcomm,ierr)
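      ! (the key argument of 0 means ranks in splitcomm keep the
      !  relative order they had in MPI_COMM_WORLD)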

      ! Start the components
      select case (color)
         case(0)
            call model(splitcomm,ncomps,AtmID)
         case(1)
            call coupler(splitcomm,ncomps,CplID)
         case default
            print *, "color error, color = ", color
      end select

      ! Components are done
      call MPI_FINALIZE(ierr)

      end program master
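
The model and coupler routines called above are defined elsewhere in this example directory (model.F90 and coupler.F90). As a rough sketch of the contract the driver relies on — MCTWorld_init and MCTWorld_clean from MCT's m_MCTWorld module are the world-registration API, but the body below is illustrative, not the example's actual code — each component receives its split communicator, the component count, and its id, and registers itself with MCT before doing any coupling:

      ! Hedged sketch of a component entry point; not part of master.F90.
      subroutine model(comm, ncomps, compid)
      use m_MCTWorld, only : MCTWorld_init => init
      use m_MCTWorld, only : MCTWorld_clean => clean
      implicit none
      include "mpif.h"
      integer, intent(in) :: comm    ! communicator from MPI_COMM_SPLIT
      integer, intent(in) :: ncomps  ! total number of components
      integer, intent(in) :: compid  ! this component's id (AtmID here)

      ! Register this component's processor set with MCT; every
      ! component in the coupled system makes the matching call.
      call MCTWorld_init(ncomps, MPI_COMM_WORLD, comm, compid)

      ! ... decompose the grid, fill an AttrVect with fields, and
      ! exchange data with the coupler (omitted in this sketch) ...

      ! Release MCTWorld storage before returning to master.
      call MCTWorld_clean()
      end subroutine model

The registration step is collective across all components, which is why the driver must start both halves of MPI_COMM_WORLD: if one color never reached MCTWorld_init, the other would wait in it indefinitely.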