# This file builds and runs a lightweight version of the MDTF test suite.
# Note that the tests assess functionality of the diagnostics,
# and do not evaluate data or scientific content.
name: MDTF_test
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
defaults:
  run:
    shell: bash -l {0}
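# NOTE: the login shell (-l) above makes each run step source the shell profile,
# which is what lets `micromamba activate` work in the steps below.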
jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-13]
        json-file: ["tests/github_actions_test_ubuntu_set1.jsonc", "tests/github_actions_test_macos_set1.jsonc"]
        json-file-set2: ["tests/github_actions_test_ubuntu_set2.jsonc", "tests/github_actions_test_macos_set2.jsonc"]
        json-file-set3: ["tests/github_actions_test_ubuntu_set3.jsonc", "tests/github_actions_test_macos_set3.jsonc"]
        # if experimental is true, the remaining matrix jobs continue to run when one fails
        experimental: [false]
        exclude:
          - os: ubuntu-latest
            json-file: "tests/github_actions_test_macos_set1.jsonc"
          - os: ubuntu-latest
            json-file-set2: "tests/github_actions_test_macos_set2.jsonc"
          - os: ubuntu-latest
            json-file-set3: "tests/github_actions_test_macos_set3.jsonc"
          - os: macos-13
            json-file: "tests/github_actions_test_ubuntu_set1.jsonc"
          - os: macos-13
            json-file-set2: "tests/github_actions_test_ubuntu_set2.jsonc"
          - os: macos-13
            json-file-set3: "tests/github_actions_test_ubuntu_set3.jsonc"
      max-parallel: 3
    steps:
      - uses: actions/checkout@v3
      #- name: Download Miniconda 3
      #  uses: conda-incubator/setup-miniconda@v2
      #  with:
      #    miniconda-version: "latest"
      #    python-version: 3.11.0
      #    channels: conda-forge
      #- name: Conda info
      #  shell: bash -el {0}
      #  run: conda info
      # Set up Micromamba
      - uses: mamba-org/setup-micromamba@v1
        with:
          init-shell: bash
          condarc: |
            channels:
              - conda-forge
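      # setup-micromamba installs the binary under $HOME/micromamba-bin and uses
      # $HOME/micromamba as the root prefix; the hard-coded CONDA_* paths exported
      # in the next steps rely on that default layout.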
      - name: Install XQuartz if macOS
        if: ${{ matrix.os == 'macos-13' }}
        run: |
          echo "Installing XQuartz"
          brew install --cask xquartz
          echo "CONDA_ROOT=/Users/runner/micromamba" >> $GITHUB_ENV
          echo "MICROMAMBA_EXE=/Users/runner/micromamba-bin/micromamba" >> $GITHUB_ENV
          echo "CONDA_ENV_DIR=/Users/runner/micromamba/envs" >> $GITHUB_ENV
      - name: Set environment variables
        run: |
          echo "POD_OUTPUT=$PWD/../wkdir" >> $GITHUB_ENV
      - name: Set conda vars
        if: ${{ matrix.os == 'ubuntu-latest' }}
        run: |
          echo "MICROMAMBA_EXE=/home/runner/micromamba-bin/micromamba" >> $GITHUB_ENV
          echo "CONDA_ROOT=/home/runner/micromamba" >> $GITHUB_ENV
          echo "CONDA_ENV_DIR=/home/runner/micromamba/envs" >> $GITHUB_ENV
      - name: Install Conda Environments
        run: |
          echo "Installing Conda Environments"
          echo "conda root ${CONDA_ROOT}"
          echo "env dir ${CONDA_ENV_DIR}"
          # MDTF-specific setup: install all conda envs
          ./src/conda/micromamba_env_setup.sh --all --micromamba_root ${CONDA_ROOT} --micromamba_exe ${MICROMAMBA_EXE} --env_dir ${CONDA_ENV_DIR}
          echo "Creating the _MDTF_synthetic_data environment"
          micromamba create -y -f ./src/conda/_env_synthetic_data.yml
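          # _MDTF_synthetic_data is activated in the next step to generate the
          # synthetic model data used by the diagnostic tests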
      - name: Generate Model Data
        run: |
          cd ../
          micromamba activate _MDTF_synthetic_data
          pip install mdtf-test-data
          mkdir mdtf_test_data && cd mdtf_test_data
          # generate the data and run unit tests
          mdtf_synthetic.py -c GFDL --startyear 1 --nyears 10 --unittest
          mdtf_synthetic.py -c NCAR --startyear 1975 --nyears 7
          mdtf_synthetic.py -c CMIP --startyear 1990 --nyears 20
          cd ../
          mkdir wkdir
          # make input data directories
          mkdir -p inputdata/obs_data
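          # wkdir and inputdata/ live one level above the repo checkout, matching
          # the POD_OUTPUT path exported earlier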
      - name: Get Observational Data for Set 1
        run: |
          echo "${PWD}"
          cd ../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min for each transfer before timeout; GitHub Actions allows 6 hours per
          # job, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
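          # curl flags, for reference: --connect-timeout/--max-time bound each attempt,
          # --retry 128 retries transient failures, --ftp-ssl requests TLS on the FTP
          # connection, and --ftp-pasv uses passive mode (firewall-friendly)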
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/convective_transition_diag_obs_data.tar --output convective_transition_diag_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/EOF_500hPa_obs_data.tar --output EOF_500hPa_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/Wheeler_Kiladis_obs_data.tar --output Wheeler_Kiladis_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_teleconnection_obs_data.tar --output MJO_teleconnection_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_suite_obs_data.tar --output MJO_suite_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/precip_diurnal_cycle_obs_data.tar --output precip_diurnal_cycle_obs_data.tar
          echo "Untarring set 1 NCAR/CESM standard test files"
          tar -xvf convective_transition_diag_obs_data.tar
          tar -xvf EOF_500hPa_obs_data.tar
          tar -xvf precip_diurnal_cycle_obs_data.tar
          tar -xvf MJO_teleconnection_obs_data.tar
          tar -xvf MJO_suite_obs_data.tar
          tar -xvf Wheeler_Kiladis_obs_data.tar
          # clean up tarballs
          rm -f *.tar
      - name: Run diagnostic tests set 1
        run: |
          echo "POD_OUTPUT is:"
          echo "${POD_OUTPUT}"
          micromamba activate _MDTF_base
          # trivial check that the install script worked
          ./mdtf_framework.py --version
          # run the test PODs
          ./mdtf -v -f ${{ matrix.json-file }}
          # Uncomment the following line to debug the POD log(s)
          # cat ${POD_OUTPUT}/MDTF_NCAR.Synthetic_1975_1981/Wheeler_Kiladis/Wheeler_Kiladis.log
      - name: Get observational data for set 2
        run: |
          echo "${PWD}"
          # remove data from the previous run
          # Actions resets to the repo root directory at every step, so we need to cd again
          cd ../inputdata/obs_data
          echo "deleting obs data from set 1"
          rm -rf *
          cd ../../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min for the transfer before timeout; GitHub Actions allows 6 hours per
          # job, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_prop_amp_obs_data.tar --output MJO_prop_amp_obs_data.tar
          echo "Untarring set 2 GFDL standard test files"
          tar -xvf MJO_prop_amp_obs_data.tar
          # clean up tarballs
          rm -f *.tar
      - name: Run diagnostic tests set 2
        run: |
          micromamba activate _MDTF_base
          # run the test PODs
          ./mdtf -v -f ${{ matrix.json-file-set2 }}
          # Uncomment the following line for debugging
          # cat ../wkdir/MDTF_GFDL.Synthetic_1_10/MJO_prop_amp/MJO_prop_amp.log
      - name: Get observational data for set 3
        run: |
          echo "${PWD}"
          # remove data from the previous run
          # Actions resets to the repo root directory at every step, so we need to cd again
          cd ../inputdata/obs_data
          echo "deleting obs data from set 2"
          rm -rf *
          cd ../../
          echo "Available Space"
          df -h
          # attempt FTP data fetch
          # allow 20 min for each transfer before timeout; GitHub Actions allows 6 hours per
          # job, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/temp_extremes_distshape_obs_data.tar --output temp_extremes_distshape_obs_data.tar
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/tropical_pacific_sea_level_obs_data.tar.gz --output tropical_pacific_sea_level_obs_data.tar.gz
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/mixed_layer_depth_obs_data.tar --output mixed_layer_depth_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/ocn_surf_flux_diag_obs_data.tar --output ocn_surf_flux_diag_obs_data.tar
          #curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/albedofb_obs_data.tar --output albedofb_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/seaice_suite_obs_data.tar --output seaice_suite_obs_data.tar
          curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/stc_eddy_heat_fluxes_obs_data.tar --output stc_eddy_heat_fluxes_obs_data.tar
          echo "Untarring set 3 CMIP standard test files"
          #tar -xvf temp_extremes_distshape_obs_data.tar
          #tar -zxvf tropical_pacific_sea_level_obs_data.tar.gz
          tar -xvf mixed_layer_depth_obs_data.tar
          tar -xvf ocn_surf_flux_diag_obs_data.tar
          #tar -xvf albedofb_obs_data.tar
          tar -xvf seaice_suite_obs_data.tar
          tar -xvf stc_eddy_heat_fluxes_obs_data.tar
          # clean up tarballs
          rm -f *.tar
          rm -f *.tar.gz
      - name: Run CMIP diagnostic tests set 3
        run: |
          micromamba activate _MDTF_base
          # run the test PODs
          ./mdtf -v -f ${{ matrix.json-file-set3 }}
      - name: Run unit tests
        run: |
          micromamba activate _MDTF_base
          python -m unittest discover
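          # with no arguments, discover starts in the current directory and runs
          # every module matching test*.py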