! Unstructured-mesh dynamical core ("caldyn") driver module.
! Designed to be called from C/Python: exported procedures are BIND(C) and
! their C-visible names are prefixed with 'dynamico_' via the BINDC macro.
MODULE caldyn_unstructured_mod
  USE ISO_C_BINDING
  USE OMP_LIB
#ifdef CPP_USING_XIOS
  USE xios
#endif
  IMPLICIT NONE
  PRIVATE
  SAVE
! BINDC_(foo) expands to BIND(C, name="foo"); BINDC(foo) prefixes the name
! with 'dynamico_' so all C entry points share one namespace.
#define BINDC_(thename) BIND(C, name=#thename)
#define BINDC(thename) BINDC_(dynamico_ ## thename)
!
! Shorthand type macros used throughout this file (C-interoperable reals/ints).
#define DBL REAL(C_DOUBLE)
#define DOUBLE1(m) DBL, DIMENSION(m)
#define DOUBLE2(m,n) DBL, DIMENSION(m,n)
#define DOUBLE3(m,n,p) DBL, DIMENSION(m,n,p)
#define INDEX INTEGER(C_INT)
! Bottom geopotential boundary: constant Phi_bot (PHI_BOT_VAR 0. presumably
! disables a spatially-varying bottom — confirm against the kernel files).
#define PHI_BOT(ij) Phi_bot
#define PHI_BOT_VAR 0.
!
  ! Enumerations for run-time options (selected via the BIND(C) variables below).
  INTEGER, PARAMETER :: eta_mass=1, eta_lag=2, &
       thermo_theta=1, thermo_entropy=2, thermo_moist=3, thermo_boussinesq=4, &
       caldyn_vert_cons=1
  ! Option switches, settable from the C/Python side (BIND(C)).
  ! nb_threads=0 means "use OMP_GET_MAX_THREADS()" (set in init_metric).
  INTEGER(C_INT), BIND(C) :: caldyn_thermo=thermo_theta, caldyn_eta=eta_lag, &
       caldyn_vert_variant=caldyn_vert_cons, nb_threads=0
  LOGICAL(C_BOOL), BIND(C) :: hydrostatic=.TRUE., debug_hevi_solver=.TRUE., rigid=.TRUE.
!
  ! Mesh sizes, set from the caller before init_mesh is invoked.
  INDEX, BIND(C) :: llm, nqdyn, edge_num, primal_num, dual_num, &
       max_primal_deg, max_dual_deg, max_trisk_deg
  ! Mesh connectivity tables, filled by init_mesh.
  INDEX, ALLOCATABLE, PRIVATE :: & ! deg(ij) = nb of vertices = nb of edges of primal/dual cell ij
       primal_deg(:), primal_edge(:,:), primal_vertex(:,:), primal_ne(:,:), &
       dual_deg(:), dual_edge(:,:), dual_vertex(:,:), dual_ne(:,:), &
       trisk_deg(:), trisk(:,:), &
       left(:), right(:), up(:), down(:)
  ! left and right are adjacent primal cells
  ! flux is positive when going from left to right
  ! up and down are adjacent dual cells
  ! circulation is positive when going from down to up
!
  ! Physical constants and timing accumulator, settable/readable from C.
  DBL, BIND(C) :: elapsed, g, ptop, cpp, cppv, Rd, Rv, preff, Treff, &
       pbot, Phi_bot, rho_bot
  DBL :: kappa   ! Rd/cpp, derived in init_params
  ! Metric terms (areas, Coriolis at vertices, edge length ratios), see init_metric.
  DOUBLE1(:), ALLOCATABLE, PRIVATE :: le_de, fv, Av, Ai
  ! 2D metric/weight tables and hybrid vertical coordinate coefficients.
  DOUBLE2(:,:), ALLOCATABLE, PRIVATE :: Riv2, wee, ap,bp, mass_bl, mass_dak, mass_dbk
!
  INTEGER(C_INT), BIND(C) :: comm_icosa   ! MPI communicator returned by XIOS
#ifdef CPP_USING_XIOS
  TYPE(xios_context) :: ctx_hdl
#endif
CONTAINS
!
!-------------------------------------- KERNELS --------------------------------------
!
!
! Loop-index declarations shared by all kernel wrappers; the names are
! contracts with the included .k90 kernel files.
#define DECLARE_INDICES INTEGER ij,l,iq,iedge,edge,ivertex,vertex,ij_left,ij_right,ij_up,ij_down,itrisk,edge_trisk,kup,kdown
! Field-shape macros: PS = surface (2D), MASS = full levels, Z = dual cells,
! U = edges, UL = edges at interfaces, THETA = tracers, GEOPOT = interfaces.
#define FIELD_PS DOUBLE1(primal_num)
#define FIELD_MASS DOUBLE2(llm, primal_num)
#define FIELD_Z DOUBLE2(llm, dual_num)
#define FIELD_U DOUBLE2(llm, edge_num)
#define FIELD_UL DOUBLE2(llm+1, edge_num)
#define FIELD_THETA DOUBLE3(llm, primal_num, nqdyn)
#define FIELD_GEOPOT DOUBLE2(llm+1, primal_num)
!
! NaN detector: NaN fails every comparison, so .NOT.ABS(x)<1e20 is true for
! NaN (and for huge values). Debug helper, unused in this chunk.
#define HASNAN(field) (ANY(.NOT.ABS(field)<1e20))
!
! Main entry point: computes all diagnostics and tendencies for one flow state.
! Dispatches to the hydrostatic (HPE) or non-hydrostatic (Euler) sequence of
! kernels, and accumulates wall-clock time into module variable 'elapsed'.
SUBROUTINE caldyn_unstructured(tau, mass_col,rhodz,theta_rhodz,u,geopot,w, & ! IN : flow state
     theta,ps,pk,hflux,qv, &                ! OUT : diags (except surface geopot : IN)
     dmass_col,drhodz,dtheta_rhodz,du_fast,du_slow, &
     dPhi_fast, dPhi_slow, dW_fast, dW_slow) BINDC(caldyn_unstructured) ! OUT : tendencies
  DBL, VALUE :: tau
  FIELD_MASS :: rhodz, drhodz, pk, berni ! IN, OUT, DIAG
  FIELD_THETA :: theta_rhodz, dtheta_rhodz, theta ! IN, OUT, DIAG
  FIELD_GEOPOT :: wflux, w, geopot, & ! DIAG, INOUT
       dPhi_fast, dPhi_slow, dW_fast, dW_slow ! OUT
  FIELD_U :: u,du_fast,du_slow,hflux,qu ! INOUT,OUT,OUT,DIAG
  FIELD_Z :: qv ! DIAG
  FIELD_PS :: ps,dmass_col,mass_col ! OUT,OUT,IN (if eta_mass) or OUT,UNUSED,UNUSED (if eta_lag)
  ! NOTE(review): wwuu is a local automatic array of size (llm+1)*edge_num;
  ! for large meshes this lives on the stack — confirm stack limits are adequate.
  DOUBLE2(llm+1, edge_num) :: wwuu
  DBL :: time1,time2
  INTEGER :: ij
!
!  CALL CPU_TIME(time1)
  time1=OMP_GET_WTIME()
  IF(hydrostatic) THEN
     ! Hydrostatic sequence, executed inside one OpenMP parallel region;
     ! the called kernels contain their own worksharing directives.
     !$OMP PARALLEL NUM_THREADS(nb_threads)
     ! Zero the tendency accumulators before the kernels add into them.
     !$OMP DO SCHEDULE(STATIC)
     DO ij=1,edge_num
        du_fast(:,ij)=0.
        du_slow(:,ij)=0.
     END DO
     !$OMP END DO
     CALL compute_theta(mass_col,rhodz,theta_rhodz, theta)
     CALL compute_geopot(rhodz,theta, ps,pk,geopot)
     CALL compute_caldyn_fast(tau, pk,berni,theta,geopot, du_fast,u)
     CALL compute_pvort_only(rhodz,u,qv,qu)
     CALL compute_caldyn_slow_hydro(rhodz,theta,u, berni,hflux,du_slow)
     CALL compute_coriolis(hflux,theta,qu, drhodz,dtheta_rhodz,du_slow)
     ! Vertical transport only with a mass-based coordinate; with eta_lag
     ! (Lagrangian levels) there is no cross-level mass flux.
     IF(caldyn_eta == eta_mass) THEN
        CALL caldyn_vert(drhodz,rhodz,theta,u, dmass_col,wflux,dtheta_rhodz,du_slow,wwuu)
     END IF
     !$OMP END PARALLEL
  ELSE ! NH
     ! Non-hydrostatic sequence (no enclosing OMP region here, unlike the
     ! hydrostatic branch — presumably serial; confirm if intended).
     DO ij=1,edge_num
        du_fast(:,ij)=0.
        du_slow(:,ij)=0.
     END DO
     ! Rigid-lid boundary conditions: no mass flux through bottom/top interfaces.
     DO ij=1,primal_num
        wflux(1,ij)=0.
        wflux(llm+1,ij)=0.
     END DO
     CALL compute_theta(mass_col,rhodz,theta_rhodz, theta)
     CALL compute_caldyn_solver(tau,rhodz,theta,pk,geopot,W,dPhi_fast,dW_fast,du_fast)
     CALL compute_caldyn_fast(tau, pk,berni,theta,geopot, du_fast,u)
     CALL compute_pvort_only(rhodz,u,qv,qu)
     CALL compute_caldyn_slow_NH(u,rhodz,geopot,W, hflux,du_slow,dPhi_slow,dW_slow)
     CALL compute_coriolis(hflux,theta,qu, drhodz,dtheta_rhodz,du_slow)
     IF(caldyn_eta == eta_mass) THEN
        CALL caldyn_vert(drhodz,rhodz,theta,u, dmass_col,wflux,dtheta_rhodz,du_slow,wwuu)
        CALL compute_caldyn_vert_NH(rhodz,geopot,W,wflux, du_slow,dPhi_slow,dW_slow)
     END IF
  END IF
  time2=OMP_GET_WTIME()
!  CALL CPU_TIME(time2)
  elapsed = elapsed + time2-time1
END SUBROUTINE caldyn_unstructured
!
!----------------------------- Non-Hydrostatic -----------------------------
!
! Implicit solver for the NH geopotential equation; body is in the included
! kernel. Local arrays (p_ik, A_ik, ... R_il, x_il, B_il, D_il) look like a
! tridiagonal-solve workspace — see compute_NH_geopot.k90 for the algorithm.
SUBROUTINE compute_NH_geopot(tau,dummy, m_ik, m_il, theta, W_il, Phi_il)
  FIELD_MASS :: m_ik, theta, p_ik, A_ik, C_ik ! IN*2,LOCAL*3
  FIELD_GEOPOT :: m_il, W_il, Phi_il, Phi_star_il, R_il, x_il, B_il, D_il ! IN,INOUT*2, LOCAL*5
  DBL :: tau, dummy, gamma, rho_ij, X_ij, Y_ij, wil, tau2_g, g2, gm2, ml_g2, c2_mik
  DECLARE_INDICES
  INTEGER :: iter
#include "../kernels_unst/compute_NH_geopot.k90"
END SUBROUTINE compute_NH_geopot
!
! Slow (advective) tendencies in the NH case: mass flux, du, dPhi, dW.
SUBROUTINE compute_caldyn_slow_NH(u,rhodz,Phi,W, hflux,du,dPhi,dW)
  FIELD_U :: u, hflux, du ! IN, OUT, OUT
  FIELD_MASS :: rhodz, berni ! IN, LOCAL
  FIELD_GEOPOT :: Phi,W,dPhi,dW, w_il, gradPhi2 ! IN,IN, OUT,OUT, LOCAL
  FIELD_UL :: DePhil, v_el, G_el, F_el ! LOCAL
  DECLARE_INDICES
  DBL :: W_el, W2_el, gPhi2, dP, divG, u2, uu
#include "../kernels_unst/caldyn_slow_NH.k90"
END SUBROUTINE compute_caldyn_slow_NH
!
! Fast NH tendencies and Exner pressure; wraps caldyn_solver.k90.
SUBROUTINE compute_caldyn_solver(tau,rhodz,theta,pk,geopot,W,dPhi,dW,du)
  DBL, INTENT(IN) :: tau
  FIELD_MASS :: rhodz,pk,berni,pres ! IN, OUT, LOCAL
  FIELD_THETA :: theta ! IN
  FIELD_GEOPOT :: geopot,W,dPhi,dW, m_il ! INOUT,INOUT, OUT,OUT, LOCAL
  FIELD_U :: du ! OUT
  DECLARE_INDICES
  DBL :: X_ij, rho_ij, T_ij, gamma, Cvd, vreff
#include "../kernels_unst/caldyn_solver.k90"
END SUBROUTINE compute_caldyn_solver
!
! Vertical-transport contribution to the NH tendencies (du, dPhi, dW),
! used only with the mass-based vertical coordinate (see caller).
SUBROUTINE compute_caldyn_vert_NH(mass,geopot,W,wflux, du,dPhi,dW)
  FIELD_MASS :: mass, eta_dot, wcov, W_etadot
  FIELD_GEOPOT :: geopot,W,wflux,dPhi,dW
  FIELD_U :: du
  DECLARE_INDICES
  DBL :: w_ij, wflux_ij
#include "../kernels_unst/caldyn_vert_NH.k90"
END SUBROUTINE compute_caldyn_vert_NH
!
!----------------------------- Hydrostatic -------------------------------
!
! Hydrostatic balance: integrates geopotential upward from geopot(1,:) and
! computes Exner pressure pk and surface pressure ps.
SUBROUTINE compute_geopot(rhodz,theta,ps,pk,geopot)
  FIELD_MASS :: rhodz,pk ! IN, OUT
  FIELD_THETA :: theta ! IN
  FIELD_GEOPOT :: geopot ! IN(l=1)/OUT(l>1)
  FIELD_PS :: ps ! OUT
  DECLARE_INDICES
  DBL :: gdz, ke, uu, chi, gv, exner_ik, temp_ik, p_ik, qv, Rmix
#include "../kernels_unst/compute_geopot.k90"
END SUBROUTINE compute_geopot
!
! Slow hydrostatic tendencies: horizontal mass flux hflux and du
! (Bernoulli term accumulated into berni).
SUBROUTINE compute_caldyn_slow_hydro(rhodz,theta,u, berni,hflux,du)
  FIELD_MASS :: rhodz,berni! IN
  FIELD_THETA :: theta ! IN
  FIELD_U :: u,hflux,du ! IN, OUT, OUT
  DECLARE_INDICES
  ! 'zero' is read by the included kernel (compile-time switch).
  LOGICAL, PARAMETER :: zero=.TRUE.
  DBL :: ke, uu
#include "../kernels_unst/caldyn_slow_hydro.k90"
END SUBROUTINE compute_caldyn_slow_hydro
!
!---------------------------------- Generic ------------------------------
!
! Vertical transport with a mass-based coordinate: computes the interface
! mass flux wflux, column-mass tendency, and the resulting theta/u tendencies.
! Three kernels are chained; wwuu is zeroed here before they fill it.
SUBROUTINE caldyn_vert(convm,rhodz,theta,u, dmass_col,wflux,dtheta_rhodz,du, wwuu)
  FIELD_PS :: dmass_col
  FIELD_MASS :: convm, rhodz
  FIELD_U :: u,du
  FIELD_THETA :: dtheta_rhodz, theta
  FIELD_GEOPOT :: wflux
  DOUBLE2(llm+1, edge_num) :: wwuu
  DECLARE_INDICES
  DBL :: dF_deta, dFu_deta
  wwuu=0.
#include "../kernels_unst/caldyn_wflux.k90"
#include "../kernels_unst/caldyn_dmass.k90"
#include "../kernels_unst/caldyn_vert.k90"
END SUBROUTINE caldyn_vert
!
! Coriolis / TRiSK term plus flux divergence: updates convm, dtheta_rhodz, du.
SUBROUTINE compute_coriolis(hflux,theta,qu, convm,dtheta_rhodz,du)
  FIELD_U :: hflux, Ftheta, qu, du
  FIELD_MASS :: convm
  FIELD_THETA :: theta, dtheta_rhodz
  DECLARE_INDICES
  DBL :: divF, du_trisk
#include "../kernels_unst/coriolis.k90"
END SUBROUTINE
!
! Diagnoses theta from its mass-weighted form theta_rhodz (and, with a
! mass-based coordinate, rhodz from mass_col); body in theta.k90.
SUBROUTINE compute_theta(mass_col,rhodz,theta_rhodz, theta)
  FIELD_PS :: mass_col
  FIELD_MASS :: rhodz
  FIELD_THETA :: theta, theta_rhodz
  DECLARE_INDICES
  DBL :: m
#include "../kernels_unst/theta.k90"
END SUBROUTINE
!
! Potential vorticity at dual cells (qv) and averaged onto edges (qu).
SUBROUTINE compute_pvort_only(rhodz,u,qv,qu)
  FIELD_MASS :: rhodz
  FIELD_U :: u,qu
  FIELD_Z :: qv
  DECLARE_INDICES
  DBL :: etav, hv
#include "../kernels_unst/pvort_only.k90"
END SUBROUTINE compute_pvort_only
!
! Fast tendencies (pressure-gradient/Bernoulli term) and forward-Euler
! update of u with time step tau; body in caldyn_fast.k90.
SUBROUTINE compute_caldyn_fast(tau, pk,berni,theta,geopot, du,u)
  DBL, INTENT(IN) :: tau
  FIELD_MASS :: pk,berni ! INOUT, OUT
  FIELD_THETA :: theta ! IN
  FIELD_GEOPOT :: geopot ! IN
  FIELD_U :: u,du ! INOUT,INOUT
  DECLARE_INDICES
  DBL :: due
#include "../kernels_unst/caldyn_fast.k90"
END SUBROUTINE compute_caldyn_fast
!
!----------------------------- Unused -----------------------------
!
! Discrete gradient of a primal-cell field onto edges (exported for testing).
SUBROUTINE gradient(b,grad) BINDC(gradient)
  DOUBLE2(llm,primal_num) :: b
  DOUBLE2(llm,edge_num) :: grad
  DECLARE_INDICES
#include "../kernels_unst/gradient.k90"
END SUBROUTINE
!
! Discrete divergence of an edge field onto primal cells (exported for testing).
SUBROUTINE div(u,divu) BINDC(div)
  DOUBLE2(llm,primal_num) :: divu
  DOUBLE2(llm,edge_num) :: u
  DECLARE_INDICES
  DBL :: div_ij
  !$OMP PARALLEL NUM_THREADS(nb_threads)
#include "../kernels_unst/div.k90"
  !$OMP END PARALLEL
END SUBROUTINE
!
!---------------------------- CONTEXT INITIALIZATION --------------------------
!
! (Re)allocation helpers: safe to call init_* routines more than once.
#define ALLOC1(v,n1) IF(ALLOCATED(v)) DEALLOCATE(v) ; ALLOCATE(v(n1))
#define ALLOC2(v,n1,n2) IF(ALLOCATED(v)) DEALLOCATE(v) ; ALLOCATE(v(n1,n2))

! Copies mesh connectivity from the caller into module storage.
! Mesh sizes (primal_num, dual_num, edge_num, max_*_deg, llm) must have been
! set through the BIND(C) module variables before calling.
! Aborts if any primal/dual cell has fewer than 3 vertices.
! NOTE(review): primal_vertex is declared at module level but neither
! allocated nor filled here — confirm it is unused or initialized elsewhere.
SUBROUTINE init_mesh( &
     primal_deg_, primal_edge_, primal_ne_, &
     dual_deg_, dual_edge_, dual_ne_, dual_vertex_, &
     left_, right_, up_, down_ ,&
     trisk_deg_, trisk_) BINDC(init_mesh)
  INDEX :: primal_deg_(primal_num), primal_edge_(max_primal_deg,primal_num), &
       primal_ne_(max_primal_deg,primal_num), &
       dual_deg_(dual_num), dual_edge_(max_dual_deg,dual_num), &
       dual_ne_(max_dual_deg,dual_num), &
       dual_vertex_(max_dual_deg,dual_num), &
       trisk_deg_(edge_num), trisk_(max_trisk_deg, edge_num)
  INDEX, DIMENSION(edge_num) :: left_, right_, down_, up_
  PRINT *, 'init_mesh ...'
  PRINT *, 'Primal mesh : ', primal_num, max_primal_deg
  PRINT *, 'Dual mesh   : ', dual_num, max_dual_deg
  PRINT *, 'Edge mesh   : ', edge_num, max_trisk_deg
  PRINT *, 'Vertical levels :', llm
  ALLOC1(primal_deg, primal_num)
  ALLOC2(primal_edge, max_primal_deg,primal_num)
  ALLOC2(primal_ne, max_primal_deg,primal_num)
  ALLOC1(dual_deg,dual_num)
  ALLOC2(dual_edge, max_dual_deg,dual_num)
  ALLOC2(dual_ne, max_dual_deg,dual_num)
  ALLOC2(dual_vertex, max_dual_deg,dual_num)
  ALLOC1(trisk_deg, edge_num)
  ALLOC2(trisk, max_trisk_deg, edge_num)
  PRINT *, SHAPE(trisk), edge_num
  ALLOC1(left, edge_num)
  ALLOC1(right, edge_num)
  ALLOC1(up, edge_num)
  ALLOC1(down, edge_num)
  primal_deg(:) = primal_deg_(:)
  primal_edge(:,:) = primal_edge_(:,:)
  primal_ne(:,:) = primal_ne_(:,:)
  dual_deg(:) = dual_deg_(:)
  dual_edge(:,:) = dual_edge_(:,:)
  dual_ne(:,:) = dual_ne_(:,:)
  dual_vertex(:,:) = dual_vertex_(:,:)
  ! Sanity checks: a valid cell is at least a triangle.
  IF(MINVAL(dual_deg)<3) THEN
     STOP 'At least one dual cell has less than 3 vertices'
  END IF
  IF(MINVAL(primal_deg)<3) THEN
     STOP 'At least one primal cell has less than 3 vertices'
  END IF
  left(:)=left_(:)
  right(:)=right_(:)
  down(:)=down_(:)
  up(:)=up_(:)   ! was 'up=up_(:)' — normalized to match sibling assignments
  trisk_deg(:)=trisk_deg_(:)
  trisk(:,:)=trisk_(:,:)
  ! Debug output: each index table's maximum vs. the valid upper bound.
  PRINT *, MAXVAL(primal_edge), edge_num
  PRINT *, MAXVAL(dual_edge), edge_num
  PRINT *, MAXVAL(dual_vertex), dual_num
  PRINT *, MAXVAL(trisk), edge_num
  PRINT *, MAX(MAXVAL(left),MAXVAL(right)), primal_num
  PRINT *, MAX(MAXVAL(up),MAXVAL(down)), dual_num
  PRINT *, SHAPE(trisk), edge_num
  PRINT *,' ... Done.'
END SUBROUTINE init_mesh

! Copies metric data (cell areas Ai/Av, Coriolis parameter fv, edge-length
! ratios le_de, TRiSK weights Riv2/wee) into module storage, then finalizes
! the OpenMP thread count (nb_threads=0 means "use all available threads").
SUBROUTINE init_metric(Ai_, Av_, fv_, le_de_, Riv2_, wee_) BINDC(init_metric)
  DOUBLE1(primal_num) :: Ai_
  DOUBLE1(dual_num) :: Av_, fv_
  DOUBLE1(edge_num) :: le_de_
  DOUBLE2(max_dual_deg,dual_num) :: Riv2_
  DOUBLE2(max_trisk_deg,edge_num) :: wee_
  PRINT *, 'init_metric ...'
  ALLOC1(Ai,primal_num)
  ALLOC1(Av,dual_num)
  ALLOC1(fv,dual_num)
  ALLOC1(le_de,edge_num)
  ALLOC2(Riv2, max_dual_deg, dual_num)
  ALLOC2(wee, max_trisk_deg, edge_num)
  Ai(:) = Ai_(:)
  Av(:) = Av_(:)
  fv(:) = fv_(:)
  le_de(:) = le_de_(:)
  Riv2(:,:)=Riv2_(:,:)
  wee(:,:) = wee_(:,:)
  ! Debug output: magnitude of each metric field.
  PRINT *, MAXVAL(ABS(Ai))
  PRINT *, MAXVAL(ABS(Av))
  PRINT *, MAXVAL(ABS(fv))
  PRINT *, MAXVAL(ABS(le_de))
  PRINT *, MAXVAL(ABS(Riv2))
  PRINT *, MAXVAL(ABS(wee))
  PRINT *, MINVAL(right), MAXVAL(right)
  ! BUGFIX: was 'MINVAL(right), MAXVAL(left)' — copy-paste error; this line
  ! is meant to report the range of 'left', as the previous one does for 'right'.
  PRINT *, MINVAL(left), MAXVAL(left)
  PRINT *,' ... Done.'
  IF(nb_threads==0) nb_threads=OMP_GET_MAX_THREADS()
  PRINT *,'OpenMP : max_threads, num_procs, nb_threads', OMP_GET_MAX_THREADS(), omp_get_num_procs(), nb_threads
END SUBROUTINE init_metric
!
! Derives kappa = Rd/cpp from the BIND(C) physical constants (which must be
! set by the caller beforehand) and echoes the main parameters.
SUBROUTINE init_params() BINDC(init_params)
  PRINT *, 'Setting physical parameters ...'
  IF(hydrostatic) THEN
     PRINT *, 'Hydrostatic dynamics (HPE)'
  ELSE
     PRINT *, 'Non-hydrostatic dynamics (Euler)'
  END IF
  kappa = Rd/cpp
  PRINT *, 'g = ',g
  PRINT *, 'preff = ',preff
  PRINT *, 'Treff = ',Treff
  PRINT *, 'Rd = ',Rd
  PRINT *, 'cpp = ',cpp
  PRINT *, 'kappa = ',kappa
  PRINT *, '... Done'
END SUBROUTINE init_params
!
! Copies the hybrid vertical-coordinate coefficients (bl at interfaces,
! dak/dbk layer increments) into module storage.
SUBROUTINE init_hybrid(bl,dak,dbk) BINDC(init_hybrid)
  DOUBLE2(llm+1, primal_num) :: bl
  DOUBLE2(llm, primal_num) :: dak,dbk
  PRINT *, 'Setting hybrid coefficients ...'
  ALLOC2(mass_bl, llm+1, primal_num)
  ALLOC2(mass_dak, llm, primal_num)
  ALLOC2(mass_dbk, llm, primal_num)
  mass_bl(:,:)  = bl(:,:)
  mass_dak(:,:) = dak(:,:)
  mass_dbk(:,:) = dbk(:,:)
  PRINT *, '... Done, llm = ', llm
END SUBROUTINE init_hybrid

!---------------------------------------------- XIOS -----------------------------------------

#ifdef CPP_USING_XIOS
! Initializes XIOS and our context; stores the returned communicator in the
! BIND(C) module variable comm_icosa. The commented-out calls document the
! axis/domain setup that is (presumably) performed elsewhere.
SUBROUTINE setup_xios() BINDC(setup_xios)
  ! MPI_INIT / MPI_finalize are assumed to be called BEFORE/AFTER this routine
  ! This is the case when calling from Python after importing mpi4py
  INTEGER :: ierr, mpi_threading_mode
  PRINT *, 'Initialize XIOS and obtain our communicator'
  CALL xios_initialize("icosagcm",return_comm=comm_icosa)
  PRINT *, 'Initialize our XIOS context'
  CALL xios_context_initialize("icosagcm",comm_icosa)
  CALL xios_get_handle("icosagcm",ctx_hdl)
  CALL xios_set_current_context(ctx_hdl)
!  CALL xios_set_axis_attr("lev",n_glo=llm ,value=lev_value) ;
!  CALL xios_set_axis_attr("levp1",n_glo=llm+1 ,value=lev_valuep1) ;
!  CALL xios_set_axis_attr("nq",n_glo=nqtot, value=nq_value) ;
!  CALL xios_set_domaingroup_attr("i",ni_glo=ncell_tot, ibegin=displ, ni=ncell)
!  CALL xios_set_domaingroup_attr("i", data_dim=1, type='unstructured' , nvertex=6, i_index=ind_glo)
!  CALL xios_set_domaingroup_attr("i",lonvalue_1d=lon, latvalue_1d=lat, bounds_lon_1d=bounds_lon, bounds_lat_1d=bounds_lat)
!  CALL xios_set_domain_attr("u",ni_glo=ncell_tot, ibegin=displ, ni=ncell)
!  CALL xios_set_domain_attr("u", data_dim=1, type='unstructured' , nvertex=2, i_index=ind_glo)
!  CALL xios_set_domain_attr("u",lonvalue_1d=lon, latvalue_1d=lat, bounds_lon_1d=bounds_lon, bounds_lat_1d=bounds_lat)
!  CALL xios_set_domain_attr("v",ni_glo=ncell_tot, ibegin=displ, ni=ncell)
!  CALL xios_set_domain_attr("v", data_dim=1, type='unstructured' , nvertex=3)
!  CALL xios_set_domain_attr("v",lonvalue_1d=lon, latvalue_1d=lat, bounds_lon_1d=bounds_lon, bounds_lat_1d=bounds_lat)
!  CALL xios_set_timestep(dtime)
!  CALL xios_set_fieldgroup_attr("standard_output", freq_op=itau_out*xios_timestep, freq_offset=(itau_out-1)*xios_timestep)
!  CALL xios_close_context_definition()
!  PRINT *, 'Read data'
!  CALL xios_recv_field(name,field)
!  PRINT *,'Write data'
!  CALL xios_send_field(name,field_tmp)
!  PRINT *, 'Flush to disk, clean up and die'
!  CALL xios_context_finalize
END SUBROUTINE setup_xios
!
! Thin C-callable wrapper: sets the XIOS time step from a plain double.
SUBROUTINE call_xios_set_timestep(dt) BINDC(xios_set_timestep)
  DBL, VALUE :: dt
  TYPE(xios_duration) :: dtime
  dtime%second=dt
  CALL xios_set_timestep(dtime)
END SUBROUTINE call_xios_set_timestep

! Thin C-callable wrapper: advances the XIOS calendar to 'step'.
SUBROUTINE call_xios_update_calendar(step) BINDC(xios_update_calendar)
  INDEX, value :: step
  CALL xios_update_calendar(step)
END SUBROUTINE call_xios_update_calendar
#endif

END MODULE caldyn_unstructured_mod