diff --git a/bld/build-namelist b/bld/build-namelist
index 36392a8113..4d1c61eb8a 100755
--- a/bld/build-namelist
+++ b/bld/build-namelist
@@ -3852,10 +3852,10 @@ if (!$simple_phys) {
if ($phys =~ /cam7/) {
#
- # moving mountains only supported by SE dycore
+    # moving mountains only supported by the MPAS and SE dycores
# (since vorticity needs to be passed to physics in dp_coupling)
#
- if ( $dyn =~ /se/ ) {
+ if ( $dyn =~ /mpas|se/ ) {
add_default($nl, 'use_gw_movmtn_pbl', 'val'=>'.true.');
} else {
add_default($nl, 'use_gw_movmtn_pbl', 'val'=>'.false.');
@@ -3864,10 +3864,9 @@ if (!$simple_phys) {
my $use_gw_movmtn_pbl = $nl->get_value('use_gw_movmtn_pbl');
if ($use_gw_movmtn_pbl =~ /$TRUE/io) {
- if ( ! ($dyn =~ /se/) ) {
- die "$ProgName - ERROR: use_gw_movmtn_pbl is only available with the SE dycore \n";
-
- }
+ if ( ! ($dyn =~ /mpas|se/) ) {
+        die "$ProgName - ERROR: use_gw_movmtn_pbl is only available with the MPAS and SE dycores\n";
+ }
}
add_default($nl, 'use_gw_rdg_gamma' , 'val'=>'.false.');
diff --git a/cime_config/testdefs/testlist_cam.xml b/cime_config/testdefs/testlist_cam.xml
index df05d5a1ee..c760ee7f3e 100644
--- a/cime_config/testdefs/testlist_cam.xml
+++ b/cime_config/testdefs/testlist_cam.xml
@@ -1677,6 +1677,16 @@
+  <test compset="FHISTC_LTso" grid="mpasa480_mpasa480" name="ERS_D_Ln9" testmods="cam/outfrq9s_mpasa480">
+    <machines>
+      <machine name="derecho" compiler="intel" category="aux_cam"/>
+      <machine name="izumi" compiler="gnu" category="aux_cam"/>
+    </machines>
+  </test>
diff --git a/cime_config/testdefs/testmods_dirs/cam/outfrq9s_mpasa480/user_nl_clm b/cime_config/testdefs/testmods_dirs/cam/outfrq9s_mpasa480/user_nl_clm
index c4cb9d28d6..3286576e65 100644
--- a/cime_config/testdefs/testmods_dirs/cam/outfrq9s_mpasa480/user_nl_clm
+++ b/cime_config/testdefs/testmods_dirs/cam/outfrq9s_mpasa480/user_nl_clm
@@ -1,3 +1,4 @@
+flanduse_timeseries = ''
hist_nhtfrq = 9
hist_mfilt = 1
hist_ndens = 1
diff --git a/doc/ChangeLog b/doc/ChangeLog
index 2b2b518474..7159d4708b 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,5 +1,179 @@
===============================================================
+Tag name: cam6_4_091
+Originator(s): kuanchihwang
+Date: May 8, 2025
+One-line Summary: Implement support for the moving mountain gravity wave scheme in the MPAS dycore
+Github PR URL: https://github.com/ESCOMP/CAM/pull/1297
+
+Purpose of changes (include the issue number and title text for each relevant GitHub issue):
+
+ This PR implements support for the moving mountain gravity wave scheme in the MPAS dycore.
+
+ The `use_gw_movmtn_pbl` namelist option now defaults to `.true.` when the MPAS dycore and CAM7
+ physics are both selected.
+
+ The moving mountain gravity wave scheme needs relative vorticities at cell points as input.
+ However, because MPAS uses a staggered C-grid for spatial discretization, where wind vectors are
+ located at edge points, it computes relative vorticities at vertex points instead. This PR
+ therefore introduces new functionality in the MPAS subdriver to regrid vertex values to cell
+ values. The regridding is generalized so that it works with any variable located at vertex points.
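+
+ In essence, the regridding is an area-weighted average: each cell value is the sum of the
+ surrounding vertex values weighted by the kite areas those vertices share with the cell, divided
+ by the cell area. A sketch of the relation using the MPAS mesh variable names (the actual
+ implementation is `regrid_from_vertex_to_cell` in cam_mpas_subdriver.F90):
+
+     cell_value(k, iCell) = sum over i = 1, ..., nEdgesOnCell(iCell) of
+                              kiteAreasOnVertex(kiteForCell(i, iCell), verticesOnCell(i, iCell))
+                              * vertex_value(k, verticesOnCell(i, iCell)),
+                            all divided by areaCell(iCell)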
+
+ The relative vorticities are then passed to the physics buffer during dynamics-physics coupling,
+ after which the moving mountain gravity wave scheme can query and use them as input.
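+
+ On the physics side, the field can then be retrieved from the physics buffer by name. A minimal
+ sketch, for illustration only (the actual query is made by the gravity wave drag code and is not
+ part of this PR):
+
+     ! Look up the physics buffer index once, then fetch the chunked field.
+     vort4gw_idx = pbuf_get_index('VORT4GW')
+     call pbuf_get_field(pbuf, vort4gw_idx, vort4gw)   ! vort4gw has shape (pcols, pver)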
+
+ Closes #1253 (Support for new moving mountains gravity wave trigger with MPAS dycore)
+ Closes #1277 (MPAS dycore support for moving mountains parameterization)
+
+Describe any changes made to build system:
+
+ None
+
+Describe any changes made to the namelist:
+
+ The `use_gw_movmtn_pbl` namelist option now defaults to `.true.` when the MPAS dycore and CAM7
+ physics are both selected.
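+
+ Users who want to retain the previous behavior with the MPAS dycore can override this default in
+ user_nl_cam, for example:
+
+     use_gw_movmtn_pbl = .false.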
+
+List any changes to the defaults for the boundary datasets:
+
+ None
+
+Describe any substantial timing or memory changes:
+
+ None
+
+Code reviewed by:
+
+ PeterHjortLauritzen, mgduda, nusbaume
+
+List all files eliminated:
+
+ None
+
+List all files added and what they do:
+
+ None
+
+List all existing files that have been modified, and describe the changes:
+
+ M bld/build-namelist
+   * Allow the moving mountain gravity wave scheme to be selected with the MPAS dycore
+ M cime_config/testdefs/testlist_cam.xml
+   * Add testing for CAM7 low top with the MPAS dynamical core
+ M src/dynamics/mpas/dp_coupling.F90
+   * Pass relative vorticities to the physics buffer during dynamics-physics coupling
+ M src/dynamics/mpas/driver/cam_mpas_subdriver.F90
+ * Implement the computation of relative vorticities at cell points
+ M src/dynamics/mpas/dyn_comp.F90
+   * Pass relative vorticities to the physics buffer during dynamics-physics coupling
+
+If there were any failures reported from running test_driver.sh on any test
+platform, and checkin with these failures has been OK'd by the gatekeeper,
+then copy the lines from the td.*.status files for the failed tests to the
+appropriate machine below. All failed tests must be justified.
+
+ NOTE:
+
+ Since cam6_4_089, `nuopc.runconfig` contains a variable named `model_version` that stores the
+ current model version. The testing framework incorrectly flags it as a significant namelist
+ difference and fails at "NLCOMP".
+
+ All tests currently exhibit "NLFAIL" in their overall results due to those "NLCOMP" failures.
+
+ They are harmless and can be ignored.
+
+derecho/intel/aux_cam:
+
+ ERS_D_Ln9.mpasa480_mpasa480.FHISTC_LTso.derecho_intel.cam-outfrq9s_mpasa480 (Overall: DIFF) details:
+ FAIL ERS_D_Ln9.mpasa480_mpasa480.FHISTC_LTso.derecho_intel.cam-outfrq9s_mpasa480 BASELINE (Baseline directory does not exist)
+ * New test
+ SMS_D_Ln9_P1280x1.ne0CONUSne30x8_ne0CONUSne30x8_mt12.FCHIST.derecho_intel.cam-outfrq9s (Overall: FAIL) details:
+ FAIL SMS_D_Ln9_P1280x1.ne0CONUSne30x8_ne0CONUSne30x8_mt12.FCHIST.derecho_intel.cam-outfrq9s SETUP
+ * Pre-existing failures due to build-namelist error requiring CLM/CTSM external update
+
+ NOTE:
+
+ SMS_D_Ln9_P1280x1.ne0ARCTICne30x4_ne0ARCTICne30x4_mt12.FHIST.derecho_intel.cam-outfrq9s (Overall: NLFAIL) details:
+ PASS SMS_D_Ln9_P1280x1.ne0ARCTICne30x4_ne0ARCTICne30x4_mt12.FHIST.derecho_intel.cam-outfrq9s RUN
+   * This test is unstable. It fails at "RUN" in roughly two out of three attempts, but with enough retries it eventually passes.
+     When it does fail, a stack trace similar to the following is reliably observed:
+
+ forrtl: error (65): floating invalid
+ Image PC Routine Line Source
+ libpthread-2.31.s 000015356741A8C0 Unknown Unknown Unknown
+ libesmf.so 000015356DD8E72A _ZN5ESMCI3XXE10ps Unknown Unknown
+ libesmf.so 000015356DD82D90 _ZN5ESMCI3XXE10ps Unknown Unknown
+ libesmf.so 000015356DD777B0 _ZN5ESMCI3XXE10ps Unknown Unknown
+ libesmf.so 000015356DD6DBB4 _ZN5ESMCI3XXE4exe Unknown Unknown
+ libesmf.so 000015356DD6D36D _ZN5ESMCI3XXE4exe Unknown Unknown
+ libesmf.so 000015356DCC66CD _ZN5ESMCI26sparse Unknown Unknown
+ libesmf.so 000015356DCBACAF _ZN5ESMCI5Array18 Unknown Unknown
+ libesmf.so 000015356DCB970E _ZN5ESMCI5Array17 Unknown Unknown
+ libesmf.so 000015356DD1E340 c_esmc_arraysmmst Unknown Unknown
+ libesmf.so 000015356E0A419F _Z19ESMCI_regrid_ Unknown Unknown
+ libesmf.so 000015356E030568 _ZN5ESMCI7MeshCap Unknown Unknown
+ libesmf.so 000015356E0E0E42 c_esmc_regrid_cre Unknown Unknown
+ libesmf.so 000015356E7787B1 esmf_regridmod_mp Unknown Unknown
+ libesmf.so 000015356E4E198C esmf_fieldregridm Unknown Unknown
+ cesm.exe 00000000080D8794 lnd_set_decomp_an 497 lnd_set_decomp_and_domain.F90
+ cesm.exe 00000000080D0F4E lnd_set_decomp_an 128 lnd_set_decomp_and_domain.F90
+ cesm.exe 00000000080A2F3F lnd_comp_nuopc_mp 644 lnd_comp_nuopc.F90
+ libesmf.so 000015356DE68020 _ZN5ESMCI6FTable1 Unknown Unknown
+ libesmf.so 000015356DE67A6C ESMCI_FTableCallE Unknown Unknown
+ libesmf.so 000015356E153DE4 _ZN5ESMCI2VM5ente Unknown Unknown
+ libesmf.so 000015356DE68704 c_esmc_ftablecall Unknown Unknown
+ libesmf.so 000015356E359B9D esmf_compmod_mp_e Unknown Unknown
+ libesmf.so 000015356E5DD298 esmf_gridcompmod_ Unknown Unknown
+ libesmf.so 000015356E999B87 nuopc_driver_mp_l Unknown Unknown
+ libesmf.so 000015356E98F82B nuopc_driver_mp_i Unknown Unknown
+ libesmf.so 000015356DE68020 _ZN5ESMCI6FTable1 Unknown Unknown
+ libesmf.so 000015356DE67A6C ESMCI_FTableCallE Unknown Unknown
+ libesmf.so 000015356E153DE4 _ZN5ESMCI2VM5ente Unknown Unknown
+ libesmf.so 000015356DE68704 c_esmc_ftablecall Unknown Unknown
+ libesmf.so 000015356E359B9D esmf_compmod_mp_e Unknown Unknown
+ libesmf.so 000015356E5DD298 esmf_gridcompmod_ Unknown Unknown
+ libesmf.so 000015356E999B87 nuopc_driver_mp_l Unknown Unknown
+ libesmf.so 000015356E98F92C nuopc_driver_mp_i Unknown Unknown
+ libesmf.so 000015356E97F883 nuopc_driver_mp_i Unknown Unknown
+ libesmf.so 000015356DE68020 _ZN5ESMCI6FTable1 Unknown Unknown
+ libesmf.so 000015356DE67A6C ESMCI_FTableCallE Unknown Unknown
+ libesmf.so 000015356E153DE4 _ZN5ESMCI2VM5ente Unknown Unknown
+ libesmf.so 000015356DE68704 c_esmc_ftablecall Unknown Unknown
+ libesmf.so 000015356E359B9D esmf_compmod_mp_e Unknown Unknown
+ libesmf.so 000015356E5DD298 esmf_gridcompmod_ Unknown Unknown
+ cesm.exe 00000000004502F8 MAIN__ 128 esmApp.F90
+ cesm.exe 000000000042711D Unknown Unknown Unknown
+ libc-2.31.so 0000153562F2429D __libc_start_main Unknown Unknown
+ cesm.exe 000000000042704A Unknown Unknown Unknown
+
+ Line 497 of lnd_set_decomp_and_domain.F90 is a call to ESMF_FieldRegridStore, which is outside of our (CAM) control.
+
+derecho/nvhpc/aux_cam:
+
+ All pass
+
+izumi/nag/aux_cam:
+
+ All pass
+
+izumi/gnu/aux_cam:
+
+ ERS_D_Ln9.mpasa480_mpasa480.FHISTC_LTso.izumi_gnu.cam-outfrq9s_mpasa480 (Overall: DIFF) details:
+ FAIL ERS_D_Ln9.mpasa480_mpasa480.FHISTC_LTso.izumi_gnu.cam-outfrq9s_mpasa480 BASELINE (Baseline directory does not exist)
+ * New test
+
+CAM tag used for the baseline comparison tests if different than previous
+tag:
+
+ cam6_4_090
+
+Summarize any changes to answers:
+
+ None
+
+===============================================================
+
Tag name: cam6_4_090
Originator(s): lizziel
Date: April 30, 2025
diff --git a/src/dynamics/mpas/dp_coupling.F90 b/src/dynamics/mpas/dp_coupling.F90
index 10d75b4b8c..231ad6c17b 100644
--- a/src/dynamics/mpas/dp_coupling.F90
+++ b/src/dynamics/mpas/dp_coupling.F90
@@ -38,15 +38,15 @@ module dp_coupling
!=========================================================================================
subroutine d_p_coupling(phys_state, phys_tend, pbuf2d, dyn_out)
- use cam_mpas_subdriver, only: cam_mpas_update_halo
+ use cam_mpas_subdriver, only: cam_mpas_update_halo, cam_mpas_vertex_to_cell_relative_vorticities
! Convert the dynamics output state into the physics input state.
! Note that all pressures and tracer mixing ratios coming from the dycore are based on
! dry air mass.
use cam_history, only: hist_fld_active
- use dyn_comp, only: frontgf_idx, frontga_idx
+ use dyn_comp, only: frontgf_idx, frontga_idx, vort4gw_idx
use mpas_constants, only: Rv_over_Rd => rvord
- use phys_control, only: use_gw_front, use_gw_front_igw
+ use phys_control, only: use_gw_front, use_gw_front_igw, use_gw_movmtn_pbl
use cam_budget, only : thermo_budget_history
! arguments
@@ -101,6 +101,12 @@ subroutine d_p_coupling(phys_state, phys_tend, pbuf2d, dyn_out)
real(r8), allocatable :: frontgf_phys(:,:,:)
real(r8), allocatable :: frontga_phys(:,:,:)
+ ! Temporary arrays to hold vorticity for the "moving mountain" gravity wave scheme.
+ real(r8), allocatable :: vort4gw(:, :) ! Data are unchunked.
+ real(r8), allocatable :: vort4gw_phys(:, :, :) ! Data are chunked.
+ ! Pointer to vorticity in physics buffer for the "moving mountain" gravity wave scheme.
+ real(r8), pointer :: pbuf_vort4gw(:, :)
+
type(physics_buffer_desc), pointer :: pbuf_chnk(:)
integer :: lchnk, icol, icol_p, k, kk ! indices over chunks, columns, physics columns and layers
@@ -116,6 +122,11 @@ subroutine d_p_coupling(phys_state, phys_tend, pbuf2d, dyn_out)
character(len=*), parameter :: subname = 'd_p_coupling'
!----------------------------------------------------------------------------
+ nullify(pbuf_chnk)
+ nullify(pbuf_frontgf)
+ nullify(pbuf_frontga)
+ nullify(pbuf_vort4gw)
+
compute_energy_diags=thermo_budget_history
nCellsSolve = dyn_out % nCellsSolve
@@ -155,9 +166,6 @@ subroutine d_p_coupling(phys_state, phys_tend, pbuf2d, dyn_out)
if (use_gw_front .or. use_gw_front_igw) then
call cam_mpas_update_halo('scalars', endrun) ! scalars is the name of tracers in the MPAS state pool
- nullify(pbuf_chnk)
- nullify(pbuf_frontgf)
- nullify(pbuf_frontga)
!
! compute frontogenesis function and angle for gravity wave scheme
!
@@ -195,6 +203,16 @@ subroutine d_p_coupling(phys_state, phys_tend, pbuf2d, dyn_out)
end if
+ if (use_gw_movmtn_pbl) then
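+         ! Regrid relative vorticity from vertex points to cell points. The output array is
+         ! allocated inside the call with shape (nVertLevels, nCellsSolve).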
+ call cam_mpas_vertex_to_cell_relative_vorticities(vort4gw)
+
+ allocate(vort4gw_phys(pcols, pver, begchunk:endchunk), stat=ierr)
+
+ if (ierr /= 0) then
+ call endrun(subname // ': Failed to allocate vort4gw_phys')
+ end if
+ end if
+
call t_startf('dpcopy')
ncols = columns_on_task
@@ -225,6 +243,10 @@ subroutine d_p_coupling(phys_state, phys_tend, pbuf2d, dyn_out)
frontgf_phys(icol_p, k, lchnk) = frontogenesisFunction(kk, i)
frontga_phys(icol_p, k, lchnk) = frontogenesisAngle(kk, i)
end if
+
+ if (use_gw_movmtn_pbl) then
+ vort4gw_phys(icol_p, k, lchnk) = vort4gw(kk, i)
+ end if
end do
do k = 1, pverp
@@ -261,6 +283,25 @@ subroutine d_p_coupling(phys_state, phys_tend, pbuf2d, dyn_out)
deallocate(frontogenesisAngle)
end if
+ if (use_gw_movmtn_pbl) then
+ !$omp parallel do private (lchnk, ncols, icol, k, pbuf_chnk, pbuf_vort4gw)
+ do lchnk = begchunk, endchunk
+ ncols = get_ncols_p(lchnk)
+ pbuf_chnk => pbuf_get_chunk(pbuf2d, lchnk)
+
+ call pbuf_get_field(pbuf_chnk, vort4gw_idx, pbuf_vort4gw)
+
+ do k = 1, pver
+ do icol = 1, ncols
+ pbuf_vort4gw(icol, k) = vort4gw_phys(icol, k, lchnk)
+ end do
+ end do
+ end do
+
+ deallocate(vort4gw)
+ deallocate(vort4gw_phys)
+ end if
+
call t_stopf('dpcopy')
call t_startf('derived_phys')
diff --git a/src/dynamics/mpas/driver/cam_mpas_subdriver.F90 b/src/dynamics/mpas/driver/cam_mpas_subdriver.F90
index f456967484..ab9b6073d5 100644
--- a/src/dynamics/mpas/driver/cam_mpas_subdriver.F90
+++ b/src/dynamics/mpas/driver/cam_mpas_subdriver.F90
@@ -31,6 +31,7 @@ module cam_mpas_subdriver
cam_mpas_compute_unit_vectors, &
cam_mpas_update_halo, &
cam_mpas_cell_to_edge_winds, &
+ cam_mpas_vertex_to_cell_relative_vorticities, &
cam_mpas_run, &
cam_mpas_finalize, &
cam_mpas_debug_stream, &
@@ -2213,6 +2214,116 @@ subroutine cam_mpas_cell_to_edge_winds(nEdges, uZonal, uMerid, east, north, edge
end subroutine cam_mpas_cell_to_edge_winds
+ !-------------------------------------------------------------------------------
+ ! subroutine cam_mpas_vertex_to_cell_relative_vorticities
+ !
+ !> \brief Compute the relative vorticities at cell points.
+ !> \author Kuan-Chih Wang
+ !> \date 2025-04-12
+ !> \details
+   !> MPAS uses a staggered C-grid for spatial discretization, where relative
+ !> vorticities are located at vertex points because wind vectors are located at
+ !> edge points. However, physics schemes that use relative vorticities as input
+ !> usually want them at cell points instead.
+ !> This subroutine computes the relative vorticity at each cell point from its
+ !> surrounding vertex points and returns the results.
+ !
+ !-------------------------------------------------------------------------------
+ subroutine cam_mpas_vertex_to_cell_relative_vorticities(cell_relative_vorticity)
+ use mpas_derived_types, only: mpas_pool_type
+ use mpas_kind_types, only: rkind
+ use mpas_pool_routines, only: mpas_pool_get_subpool, mpas_pool_get_dimension, mpas_pool_get_array
+
+ real(rkind), allocatable, intent(out) :: cell_relative_vorticity(:, :)
+
+ character(*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_vertex_to_cell_relative_vorticities'
+ integer :: i, k
+ integer :: ierr
+ integer, pointer :: ncellssolve, nvertlevels
+ integer, pointer :: kiteforcell(:, :), nedgesoncell(:), verticesoncell(:, :)
+ real(rkind), pointer :: areacell(:), kiteareasonvertex(:, :), vorticity(:, :)
+ type(mpas_pool_type), pointer :: mpas_pool_diag, mpas_pool_mesh
+
+ nullify(ncellssolve, nvertlevels)
+ nullify(kiteforcell, nedgesoncell, verticesoncell)
+ nullify(areacell, kiteareasonvertex, vorticity)
+ nullify(mpas_pool_diag, mpas_pool_mesh)
+
+ call mpas_pool_get_subpool(domain_ptr % blocklist % allstructs, 'diag', mpas_pool_diag)
+ call mpas_pool_get_subpool(domain_ptr % blocklist % allstructs, 'mesh', mpas_pool_mesh)
+
+ ! Input.
+ call mpas_pool_get_dimension(mpas_pool_mesh, 'nCellsSolve', ncellssolve)
+ call mpas_pool_get_dimension(mpas_pool_mesh, 'nVertLevels', nvertlevels)
+
+ call mpas_pool_get_array(mpas_pool_mesh, 'kiteForCell', kiteforcell)
+ call mpas_pool_get_array(mpas_pool_mesh, 'nEdgesOnCell', nedgesoncell)
+ call mpas_pool_get_array(mpas_pool_mesh, 'verticesOnCell', verticesoncell)
+
+ call mpas_pool_get_array(mpas_pool_mesh, 'areaCell', areacell)
+ call mpas_pool_get_array(mpas_pool_mesh, 'kiteAreasOnVertex', kiteareasonvertex)
+ call mpas_pool_get_array(mpas_pool_diag, 'vorticity', vorticity)
+
+ ! Output.
+ allocate(cell_relative_vorticity(nvertlevels, ncellssolve), stat=ierr)
+
+ if (ierr /= 0) then
+ call endrun(subname // ': Failed to allocate cell_relative_vorticity')
+ end if
+
+ do i = 1, ncellssolve
+ do k = 1, nvertlevels
+ cell_relative_vorticity(k, i) = regrid_from_vertex_to_cell(i, k, &
+ nedgesoncell, verticesoncell, kiteforcell, kiteareasonvertex, areacell, &
+ vorticity)
+ end do
+ end do
+
+ nullify(ncellssolve, nvertlevels)
+ nullify(kiteforcell, nedgesoncell, verticesoncell)
+ nullify(areacell, kiteareasonvertex, vorticity)
+ nullify(mpas_pool_diag, mpas_pool_mesh)
+ end subroutine cam_mpas_vertex_to_cell_relative_vorticities
+
+ !-------------------------------------------------------------------------------
+ ! function regrid_from_vertex_to_cell
+ !
+ !> \brief Regrid values from vertex points to the specified cell point.
+ !> \author Kuan-Chih Wang
+ !> \date 2025-04-12
+ !> \details
+   !> This function computes the area-weighted average (i.e., `cell_value`) at the
+ !> specified cell point (i.e., `cell_index` and `cell_level`) from the values
+ !> at its surrounding vertex points (i.e., `vertex_value`).
+ !> The formulation used here is adapted and generalized from the
+ !> `atm_compute_solve_diagnostics` subroutine in MPAS.
+ !
+ !-------------------------------------------------------------------------------
+ pure function regrid_from_vertex_to_cell(cell_index, cell_level, &
+ nverticesoncell, verticesoncell, kiteforcell, kiteareasonvertex, areacell, &
+ vertex_value) result(cell_value)
+ use mpas_kind_types, only: rkind
+
+ integer, intent(in) :: cell_index, cell_level
+ integer, intent(in) :: nverticesoncell(:), verticesoncell(:, :), kiteforcell(:, :)
+ real(rkind), intent(in) :: kiteareasonvertex(:, :), areacell(:)
+ real(rkind), intent(in) :: vertex_value(:, :)
+ real(rkind) :: cell_value
+
+ integer :: i, j, vertex_index
+
+ cell_value = 0.0_rkind
+
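+      ! Each cell is partitioned into "kites", one per surrounding vertex. Accumulate the vertex
+      ! values weighted by the area of the kite that each vertex shares with this cell; the sum is
+      ! normalized by the cell area after the loop.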
+ do i = 1, nverticesoncell(cell_index)
+ j = kiteforcell(i, cell_index)
+ vertex_index = verticesoncell(i, cell_index)
+
+ cell_value = cell_value + &
+ kiteareasonvertex(j, vertex_index) * vertex_value(cell_level, vertex_index)
+ end do
+
+ cell_value = cell_value / areacell(cell_index)
+ end function regrid_from_vertex_to_cell
!-----------------------------------------------------------------------
! routine cam_mpas_run
diff --git a/src/dynamics/mpas/dyn_comp.F90 b/src/dynamics/mpas/dyn_comp.F90
index a82978f2cf..7513da91fa 100644
--- a/src/dynamics/mpas/dyn_comp.F90
+++ b/src/dynamics/mpas/dyn_comp.F90
@@ -38,7 +38,7 @@ module dyn_comp
use cam_budget, only: cam_budget_em_snapshot, cam_budget_em_register
-use phys_control, only: use_gw_front, use_gw_front_igw
+use phys_control, only: use_gw_front, use_gw_front_igw, use_gw_movmtn_pbl
implicit none
private
@@ -234,6 +234,9 @@ module dyn_comp
integer, public :: frontgf_idx = -1
integer, public :: frontga_idx = -1
+! Index of vorticity in physics buffer for the "moving mountain" gravity wave scheme.
+integer, protected, public :: vort4gw_idx = -1
+
real(r8), parameter :: rad2deg = 180.0_r8 / pi
real(r8), parameter :: deg2rad = pi / 180.0_r8
@@ -321,6 +324,11 @@ subroutine dyn_register()
call pbuf_add_field("FRONTGA", "global", dtype_r8, (/pcols,pver/), frontga_idx)
end if
+ if (use_gw_movmtn_pbl) then
+ ! Add vorticity field to physics buffer for the "moving mountain" gravity wave scheme.
+ ! This field will be updated during dynamics-physics coupling.
+ call pbuf_add_field("VORT4GW", "global", dtype_r8, (/pcols, pver/), vort4gw_idx)
+ end if
end subroutine dyn_register
!=========================================================================================