Skip to content

Commit

Permalink
Merge branch 'data_yaml_updates' of github.com:uramirez8707/FMS into …
Browse files Browse the repository at this point in the history
…yaml_ready
  • Loading branch information
uramirez8707 committed Nov 30, 2023
2 parents fa89714 + 81b3f7f commit 284075a
Show file tree
Hide file tree
Showing 13 changed files with 258 additions and 115 deletions.
7 changes: 6 additions & 1 deletion .github/workflows/github_autotools_gnu.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,11 @@ name: Build libFMS test with autotools

on: [push, pull_request]

# cancel running jobs if there's a newer push
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
build:
runs-on: ubuntu-latest
Expand All @@ -22,7 +27,7 @@ jobs:
SKIP_TESTS: "test_yaml_parser.5" # temporary till fixes are in
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
- name: Prepare GNU autoconf for build
run: autoreconf -if
- name: Configure the build
Expand Down
8 changes: 7 additions & 1 deletion .github/workflows/github_autotools_intel.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,10 @@
on: pull_request

# cancel running jobs if there's a newer push
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
intel-autotools:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -47,7 +53,7 @@ jobs:
./configure --prefix=/libs
make -j install && cd
- name: checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
- name: Configure
run: autoreconf -if ./configure.ac && ./configure --with-yaml
- name: Compile
Expand Down
7 changes: 6 additions & 1 deletion .github/workflows/github_cmake_gnu.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@ name: Build libFMS with cmake

on: [push, pull_request]

# cancel running jobs if there's a newer push
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
build:
runs-on: ubuntu-latest
Expand All @@ -16,7 +21,7 @@ jobs:
CMAKE_FLAGS: "${{ matrix.omp-flags }} ${{ matrix.io-flag }} ${{ matrix.libyaml-flag }} -D64BIT=on"
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
- name: Generate makefiles with CMake
run: cmake $CMAKE_FLAGS .
- name: Build the library
Expand Down
9 changes: 7 additions & 2 deletions .github/workflows/github_coupler_gnu.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
name: Test coupler build
on: [pull_request]

# cancel running jobs if there's a newer push
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
coupler-build:
runs-on: ubuntu-latest
Expand All @@ -17,11 +22,11 @@ jobs:
LDFLAGS: '-L/opt/view/lib'
steps:
- name: Checkout FMS
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
path: FMS
- name: Checkout FMScoupler
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
repository: 'NOAA-GFDL/FMScoupler'
path: FMScoupler
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/github_doc_site.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
- name: Setup repo
run: | # do autotool's job for substitutes since we don't need a full build environment
mkdir gen_docs
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/github_linter.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
- name: Run Lint
uses: NOAA-GFDL/simple_lint@f5aa1fe976bd4c231db0536ba00cbfdc26708253
with:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/version.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ jobs:
add-dev-to-version:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Append version with dev
run: sed -i '/20[0-9][0-9]\.[0-9][0-9]/ s/]/-dev]/' configure.ac
- name: Create pull request
Expand Down
2 changes: 1 addition & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,7 @@ foreach(kind ${kinds})
if ( CMAKE_Fortran_COMPILER_VERSION MATCHES "1[0-9]\.[0-9]*\.[0-9]*" AND CMAKE_Fortran_COMPILER_ID MATCHES "GNU")
if(MPI_C_COMPILER MATCHES ".*mpich.*" )
message(STATUS "Adding -fallow-argument-mismatch flag to compile with GCC >=10 and MPICH")
set_target_properties(${libTgt}_f PROPERTIES COMPILE_FLAGS "-fallow-argument-mismatch -w")
target_compile_options(${libTgt}_f PRIVATE "-fallow-argument-mismatch;-w")
endif()
endif()

Expand Down
21 changes: 19 additions & 2 deletions Makefile.am
Original file line number Diff line number Diff line change
Expand Up @@ -79,15 +79,15 @@ pkgconfigdir = $(libdir)/pkgconfig
pkgconfig_DATA = FMS.pc

## Build libFMS module
AM_CPPFLAGS = -I${top_srcdir}/include -I${top_srcdir}/mpp/include
AM_CPPFLAGS = -I${top_srcdir}/include
AM_FCFLAGS = $(FC_MODINC). $(FC_MODOUT)$(MODDIR)

noinst_LTLIBRARIES = libFMS_mod.la
libFMS_mod_la_SOURCES = libFMS.F90

fms.$(FC_MODEXT): .mods/*_mod.$(FC_MODEXT)

nodist_include_HEADERS = libFMS_mod.la
nodist_include_HEADERS = .mods/fms.$(FC_MODEXT)

include $(top_srcdir)/mkmods.mk

Expand Down Expand Up @@ -130,3 +130,20 @@ else
clean-local:
-rm -rf .mods
endif

install-data-hook:
@echo ''
@echo '+-------------------------------------------------------------+'
@echo '| Congratulations! You have successfully installed the FMS |'
@echo '| Fortran library. |'
@echo '| |'
@echo '| After the installed include and link paths have been |'
@echo '| specified, code using FMS should be compiled using the |'
@echo '| "-lFMS" flag. |'
@echo '| |'
@echo '| FMS is developed and maintained at the GFDL publicly on |'
@echo '| Github. To report an issue or view available documentation, |'
@echo '| please see our page: https://www.github.com/NOAA-GFDL/FMS |'
@echo '+-------------------------------------------------------------+'
@echo ''

115 changes: 98 additions & 17 deletions data_override/include/data_override.inc
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,9 @@ use mpp_domains_mod, only : domainUG, mpp_pass_SG_to_UG, mpp_get_UG_SG_domain, N
use time_manager_mod, only: time_type
use fms2_io_mod, only : FmsNetcdfFile_t, open_file, close_file, &
read_data, fms2_io_init, variable_exists, &
get_mosaic_tile_file
get_mosaic_tile_file, file_exists
use get_grid_version_mod, only: get_grid_version_1, get_grid_version_2
use fms_string_utils_mod, only: string

implicit none
private
Expand Down Expand Up @@ -201,18 +202,26 @@ end if

#ifdef use_yaml
if (use_data_table_yaml) then
if (file_exists("data_table")) &
call mpp_error(FATAL, "You cannot have the legacy data_table if use_data_table_yaml=.true.")
call read_table_yaml(data_table)
else
if (file_exists("data_table.yaml"))&
call mpp_error(FATAL, "You cannot have the yaml data_table if use_data_table_yaml=.false.")
allocate(data_table(max_table))
do i = 1, max_table
data_table(i) = default_table
enddo
call read_table(data_table)
end if
#else
if (file_exists("data_table.yaml"))&
call mpp_error(FATAL, "You cannot have the yaml data_table if use_data_table_yaml=.false.")

if (use_data_table_yaml) then
call mpp_error(FATAL, "compilation error, need to compile with `-Duse_yaml`")
call mpp_error(FATAL, "You cannot have use_data_table_yaml=.true. without compiling with -Duse_yaml")
else

allocate(data_table(max_table))
do i = 1, max_table
data_table(i) = default_table
Expand Down Expand Up @@ -491,8 +500,9 @@ subroutine read_table(data_table)
end subroutine read_table
#ifdef use_yaml
!> @brief Read and parse the data_table.yaml
subroutine read_table_yaml(data_table)
type(data_type), dimension(:), allocatable, intent(out) :: data_table
type(data_type), dimension(:), allocatable, intent(out) :: data_table !< Contents of the data_table.yaml
integer, allocatable :: entry_id(:)
integer :: nentries
Expand All @@ -511,6 +521,7 @@ subroutine read_table_yaml(data_table)
do i = 1, nentries
call get_value_from_key(file_id, entry_id(i), "gridname", data_table(i)%gridname)
call check_for_valid_gridname(data_table(i)%gridname)
call get_value_from_key(file_id, entry_id(i), "fieldname_code", data_table(i)%fieldname_code)
data_table(i)%fieldname_file = ""
Expand All @@ -524,28 +535,98 @@ subroutine read_table_yaml(data_table)
data_table(i)%interpol_method = "none"
call get_value_from_key(file_id, entry_id(i), "interpol_method", data_table(i)%interpol_method, &
& is_optional=.true.)
call check_interpol_method(data_table(i)%interpol_method, data_table(i)%file_name, &
data_table(i)%fieldname_file)
call get_value_from_key(file_id, entry_id(i), "factor", data_table(i)%factor)
buffer = ""
call get_value_from_key(file_id, entry_id(i), "region_type", buffer, is_optional=.true.)
if(trim(buffer) == "inside_region" ) then
data_table(i)%region_type = INSIDE_REGION
else if( trim(buffer) == "outside_region" ) then
data_table(i)%region_type = OUTSIDE_REGION
else
data_table(i)%region_type = NO_REGION
endif
call get_value_from_key(file_id, entry_id(i), "lon_start", data_table(i)%lon_start, is_optional=.true.)
call get_value_from_key(file_id, entry_id(i), "lon_end", data_table(i)%lon_end, is_optional=.true.)
call get_value_from_key(file_id, entry_id(i), "lat_start", data_table(i)%lat_start, is_optional=.true.)
call get_value_from_key(file_id, entry_id(i), "lat_end", data_table(i)%lat_end, is_optional=.true.)
call check_and_set_region_type(buffer, data_table(i)%region_type)
if (data_table(i)%region_type .ne. NO_REGION) then
call get_value_from_key(file_id, entry_id(i), "lon_start", data_table(i)%lon_start, is_optional=.true.)
call get_value_from_key(file_id, entry_id(i), "lon_end", data_table(i)%lon_end, is_optional=.true.)
call get_value_from_key(file_id, entry_id(i), "lat_start", data_table(i)%lat_start, is_optional=.true.)
call get_value_from_key(file_id, entry_id(i), "lat_end", data_table(i)%lat_end, is_optional=.true.)
call check_valid_lat_lon(data_table(i)%lon_start, data_table(i)%lon_end, &
data_table(i)%lat_start, data_table(i)%lat_end)
endif
end do
end if
table_size = nentries !< Because one variable is not enough
end subroutine read_table_yaml
!> @brief Validates a gridname; aborts via mpp_error(FATAL) if it is not
!! one of the four supported component grids.
subroutine check_for_valid_gridname(gridname)
character(len=*), intent(in) :: gridname !< Gridname to validate

! Only the four model components are accepted; anything else is a
! user error in data_table.yaml.
if (trim(gridname) .ne. "OCN" .and. trim(gridname) .ne. "ATM" .and. &
    trim(gridname) .ne. "LND" .and. trim(gridname) .ne. "ICE") &
  call mpp_error(FATAL, trim(gridname)//" is not a valid gridname. "//&
  "The acceptable values are OCN ATM LND and ICE. Check your data_table.yaml")
end subroutine check_for_valid_gridname
!> @brief Check if the interpol method is correct, crashes if it is not
subroutine check_interpol_method(interp_method, filename, fieldname)
character(len=*), intent(in) :: interp_method !< The interpol_method
character(len=*), intent(in) :: filename !< The filename
character(len=*), intent(in) :: fieldname !< The fieldname in the file

select case(trim(interp_method))
case ("bicubic", "bilinear")
! Interpolated overrides read data from a file, so both the file and the
! field name inside it are required.
if (trim(filename) .eq. "" .or. trim(fieldname) .eq. "") call mpp_error(FATAL, &
"The file_name and the fieldname_file must be set if using the bicubic or bilinear interpolation method."//&
" Check your data_table.yaml")
case ("none")
! "none" with a file_name set is the ongrid case; it still needs the
! name of the field within that file.
if (trim(filename) .ne. "" ) then
if (trim(fieldname) .eq. "") call mpp_error(FATAL, &
"If the interpol_method is none and file_name is specified (ongrid case), "//&
"you must also specify the fieldname_file")
endif
case default
! Fixed: the message previously omitted "none", which the code accepts above.
call mpp_error(FATAL, trim(interp_method)//" is not a valid interp method. "//&
"The acceptable values are none, bilinear and bicubic")
end select
end subroutine check_interpol_method
!> @brief Check if a region_type is valid, crashes if it is not. Otherwise it sets the
!! correct integer parameter.
subroutine check_and_set_region_type(region_type_str, region_type_int)
character(len=*), intent(in) :: region_type_str !< The region type as defined in the data.yaml
integer, intent(out) :: region_type_int !< The region type as an integer parameter

select case(trim(region_type_str))
case ("inside_region")
region_type_int = INSIDE_REGION
case ("outside_region")
region_type_int = OUTSIDE_REGION
case ("")
! region_type is optional; an absent key means the override applies everywhere.
region_type_int = NO_REGION
case default
! Fixed typo in the user-facing message: "outside_regioon" -> "outside_region".
call mpp_error(FATAL, trim(region_type_str)//" is not a valid region type. "//&
"The acceptable values are inside_region and outside_region. Check your data_table.yaml")
end select
end subroutine check_and_set_region_type
!> @brief Check if a region lon_start, lon_end, lat_start and lat_end is valid.
!! Crashes if it is not.
subroutine check_valid_lat_lon(lon_start, lon_end, lat_start, lat_end)
real(FMS_DATA_OVERRIDE_KIND_), intent(in) :: lon_start !< Starting longitude of the data_override region
real(FMS_DATA_OVERRIDE_KIND_), intent(in) :: lon_end !< Ending longitude of the data_override region
real(FMS_DATA_OVERRIDE_KIND_), intent(in) :: lat_start !< Starting latitude of the data_override region
real(FMS_DATA_OVERRIDE_KIND_), intent(in) :: lat_end !< Ending latitude of the data_override region

! A region must be ordered: start <= end in both directions.
! (Message format fixed to match the lat message: colon after lon_end.)
if (lon_start > lon_end) call mpp_error(FATAL, &
"lon_start:"//string(lon_start)//" is greater than lon_end:"//string(lon_end)//&
". Check your data_table.yaml.")
if (lat_start > lat_end) call mpp_error(FATAL, &
"lat_start:"//string(lat_start)//" is greater than lat_end:"//string(lat_end)//&
". Check your data_table.yaml.")
end subroutine check_valid_lat_lon
#endif
subroutine DATA_OVERRIDE_UNSET_ATM_
Expand Down
6 changes: 3 additions & 3 deletions test_fms/data_override/Makefile.am
Original file line number Diff line number Diff line change
Expand Up @@ -71,10 +71,10 @@ TESTS_ENVIRONMENT= test_input_path="@TEST_INPUT_PATH@" \
parser_skip=${skipflag}

# Run the test program.
TESTS = test_data_override2.sh
TESTS = test_data_override2.sh test_data_override_init.sh

# Include these files with the distribution.
EXTRA_DIST = test_data_override2.sh
EXTRA_DIST = test_data_override2.sh test_data_override_init.sh

# Clean up
CLEANFILES = input.nml *.nc* *.out diag_table data_table data_table.yaml INPUT/* *.dpi *.spi *.dyn *.spl
CLEANFILES = input.nml *.nc* *.out diag_table data_table data_table.yaml INPUT/* *.dpi *.spi *.dyn *.spl *-files/*
Loading

0 comments on commit 284075a

Please sign in to comment.