Commit 97bb450

Merge branch 'main' into fixes_mpi_highresmip_models

valeriupredoi committed Jan 16, 2024
2 parents 675117d + f515169

Showing 22 changed files with 2,323 additions and 1,047 deletions.
4 changes: 2 additions & 2 deletions .circleci/config.yml
@@ -159,7 +159,7 @@ jobs:
working_directory: /esmvaltool
docker:
- image: condaforge/mambaforge
resource_class: small
resource_class: medium
steps:
- run:
command: |
@@ -182,7 +182,7 @@ jobs:
# Test building documentation
docker:
- image: condaforge/mambaforge
resource_class: small
resource_class: medium
steps:
- checkout
- run:
20 changes: 11 additions & 9 deletions .github/workflows/build-and-deploy-on-pypi.yml
@@ -12,12 +12,18 @@ jobs:
build-n-publish:
name: Build and publish ESMValCore on PyPi
runs-on: ubuntu-latest
environment:
name: pypi
url: https://pypi.org/project/ESMValCore/
permissions:
# IMPORTANT: this permission is mandatory for trusted publishing
id-token: write
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Python 3.11
uses: actions/setup-python@v1
uses: actions/setup-python@v4
with:
python-version: "3.11"
- name: Install pep517
@@ -34,14 +40,10 @@ jobs:
--binary
--out-dir dist/
.
#- name: Publish distribution 📦 to Test PyPI
# uses: pypa/gh-action-pypi-publish@master
# with:
# password: ${{ secrets.test_pypi_password }}
# repository_url: https://test.pypi.org/legacy/
# - name: Publish distribution to Test PyPI
# uses: pypa/gh-action-pypi-publish@release/v1
# with:
# repository-url: https://test.pypi.org/legacy/
- name: Publish distribution 📦 to PyPI
if: startsWith(github.ref, 'refs/tags')
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.pypi_password }}
1 change: 0 additions & 1 deletion .github/workflows/install-from-condalock-file.yml
@@ -46,7 +46,6 @@ jobs:
- run: which python
- run: python -V 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt
- run: conda create --name esmvaltool-fromlock --file conda-linux-64.lock
- run: conda install pip
- run: which python
- run: pip --version
- run: pip install -e .[develop]
4 changes: 2 additions & 2 deletions CITATION.cff
@@ -201,11 +201,11 @@ authors:
given-names: Joerg

cff-version: 1.2.0
date-released: 2023-11-01
date-released: 2023-12-19
doi: "10.5281/zenodo.3387139"
license: "Apache-2.0"
message: "If you use this software, please cite it using these metadata."
repository-code: "https://github.com/ESMValGroup/ESMValCore/"
title: ESMValCore
version: "v2.10.0rc1"
version: "v2.10.0"
...
2 changes: 1 addition & 1 deletion README.md
@@ -7,7 +7,7 @@
[![codecov](https://codecov.io/gh/ESMValGroup/ESMValCore/branch/main/graph/badge.svg?token=wQnDzguwq6)](https://codecov.io/gh/ESMValGroup/ESMValCore)
[![Codacy Badge](https://app.codacy.com/project/badge/Grade/5d496dea9ef64ec68e448a6df5a65783)](https://www.codacy.com/gh/ESMValGroup/ESMValCore?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValCore&utm_campaign=Badge_Grade)
[![Docker Build Status](https://img.shields.io/docker/cloud/build/esmvalgroup/esmvalcore)](https://hub.docker.com/r/esmvalgroup/esmvalcore/)
[![Anaconda-Server Badge](https://img.shields.io/badge/Anaconda.org-2.9.0-blue.svg)](https://anaconda.org/conda-forge/esmvalcore)
[![Anaconda-Server Badge](https://img.shields.io/conda/vn/conda-forge/ESMValCore?color=blue&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/esmvalcore)
[![Github Actions Test](https://github.com/ESMValGroup/ESMValCore/actions/workflows/run-tests.yml/badge.svg)](https://github.com/ESMValGroup/ESMValCore/actions/workflows/run-tests.yml)

![esmvaltoollogo](https://raw.githubusercontent.com/ESMValGroup/ESMValCore/main/doc/figures/ESMValTool-logo-2.png)
392 changes: 200 additions & 192 deletions conda-linux-64.lock

Large diffs are not rendered by default.

1,475 changes: 741 additions & 734 deletions doc/changelog.rst

Large diffs are not rendered by default.

31 changes: 31 additions & 0 deletions doc/conf.py
@@ -60,6 +60,7 @@
'nbsphinx',
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.extlinks',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
@@ -450,6 +451,36 @@
'scipy': ('https://docs.scipy.org/doc/scipy/', None),
}

# -- Extlinks extension -------------------------------------------------------
# See https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html

extlinks = {
"discussion": (
"https://github.com/ESMValGroup/ESMValCore/discussions/%s",
"Discussion #%s",
),
"issue": (
"https://github.com/ESMValGroup/ESMValCore/issues/%s",
"Issue #%s",
),
"pull": (
"https://github.com/ESMValGroup/ESMValCore/pull/%s",
"Pull request #%s",
),
"release": (
"https://github.com/ESMValGroup/ESMValCore/releases/tag/%s",
"ESMValCore %s",
),
"team": (
"https://github.com/orgs/ESMValGroup/teams/%s",
"@ESMValGroup/%s",
),
"user": (
"https://github.com/%s",
"@%s",
),
}

# -- Custom Document processing ----------------------------------------------

sys.path.append(os.path.dirname(__file__))
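
With this configuration, short Sphinx roles can replace full GitHub URLs
throughout the documentation: for example, ``:issue:`674``` expands to a link
labelled "Issue #674" that points to
https://github.com/ESMValGroup/ESMValCore/issues/674, which is exactly the
substitution made in ``doc/quickstart/configure.rst`` below.
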
3 changes: 1 addition & 2 deletions doc/quickstart/configure.rst
@@ -243,8 +243,7 @@ Extensive documentation on setting up Dask Clusters is available

If not all preprocessor functions support lazy data, computational
performance may be best with the default scheduler.
See `issue #674 <https://github.com/ESMValGroup/ESMValCore/issues/674>`_ for
progress on making all preprocessor functions lazy.
See :issue:`674` for progress on making all preprocessor functions lazy.

**Example configurations**

50 changes: 50 additions & 0 deletions doc/recipe/preprocessor.rst
@@ -1219,6 +1219,7 @@ The ``_time.py`` module contains the following preprocessor functions:
* regrid_time_: Aligns the time axis of each dataset to have common time
points and calendars.
* timeseries_filter_: Allows application of a filter to the time-series data.
* local_solar_time_: Converts a cube with UTC time to local solar time.

Statistics functions are applied by default in the order they appear in the
list. For example, the following example applied to hourly data will retrieve
@@ -1653,6 +1654,55 @@ Examples:
See also :func:`esmvalcore.preprocessor.timeseries_filter`.

.. _local_solar_time:

``local_solar_time``
--------------------

Many variables in the Earth system show a strong diurnal cycle.
The reason for this is Earth's rotation around its own axis, which leads to a
diurnal cycle of the incoming solar radiation.
While UTC time is a good absolute time measure, it is not well suited to
analyzing diurnal cycles over larger regions.
For example, diurnal cycles over Russia and the USA are phase-shifted by ~180°
= 12 hr in UTC time.

This is where the `local solar time (LST)
<https://en.wikipedia.org/wiki/Solar_time>`__ comes into play:
For a given location, 12:00 noon LST is defined as the moment when the sun
reaches its highest point in the sky.
By using this definition based on the origin of the diurnal cycle (the sun), we
can directly compare diurnal cycles across the globe.
LST is mainly determined by the longitude of a location, but due to the
eccentricity of Earth's orbit and the tilt of its axis, it also depends on the
day of the year (see the
`equation of time <https://en.wikipedia.org/wiki/Equation_of_time>`__).
However, this correction is at most ~15 min, which is usually smaller than the
highest-frequency output of CMIP6 models (1 hr) and smaller than the time scale
of the diurnal evolution of meteorological phenomena (which is on the order of
hours, not minutes).
Thus, instead, we use the **mean** LST, which solely depends on longitude:

.. math::

   LST = UTC + 12 \cdot \frac{lon}{180°}

where the times are given in hours and `lon` in degrees in the interval
[-180, 180].
To transform data from UTC to LST, this preprocessor shifts data along the time
axis based on the longitude.

This preprocessor does not need any additional parameters.

Example:

.. code-block:: yaml

    calculate_local_solar_time:
      local_solar_time:

See also :func:`esmvalcore.preprocessor.local_solar_time`.


.. _area operations:

Area manipulation
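
As a rough illustration of the ``local_solar_time`` formula documented in the
new section above (not the ESMValCore implementation itself, which shifts iris
cubes along the time axis), the mean-LST offset can be computed directly from
the longitude; the helper name below is made up for this sketch:

.. code-block:: python

    import datetime


    def mean_lst_offset_hours(lon: float) -> float:
        """Mean local-solar-time offset from UTC in hours.

        ``lon`` is the longitude in degrees in [-180, 180], following
        LST = UTC + 12 * lon / 180.
        """
        return 12.0 * lon / 180.0


    # Example: 12:00 UTC at 90°E corresponds to 18:00 mean local solar time.
    utc = datetime.datetime(2000, 1, 1, 12, 0)
    lst = utc + datetime.timedelta(hours=mean_lst_offset_hours(90.0))
    print(lst)  # 2000-01-01 18:00:00
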
4 changes: 2 additions & 2 deletions environment.yml
@@ -20,7 +20,7 @@ dependencies:
- geopy
- humanfriendly
- importlib_metadata # required for Python < 3.10
- iris >=3.6.0
- iris >=3.6.1
- iris-esmf-regrid >=0.7.0
- isodate
- jinja2
@@ -37,7 +37,7 @@ dependencies:
- psutil
- py-cordex
- pybtex
- python >=3.9
- python >=3.9,<3.12
- python-stratify >=0.3
- pyyaml
- requests
16 changes: 8 additions & 8 deletions esmvalcore/cmor/_fixes/fix.py
@@ -9,6 +9,7 @@
from pathlib import Path
from typing import TYPE_CHECKING, Any, Optional

import dask
import numpy as np
from cf_units import Unit
from iris.coords import Coord, CoordExtent
@@ -675,18 +676,17 @@ def _fix_longitude_0_360(
if not cube_coord.standard_name == 'longitude':
return (cube, cube_coord)

# Only apply fixes when values are outside of valid range [0, 360]
inside_0_360 = all([
cube_coord.core_points().min() >= 0.0,
cube_coord.core_points().max() <= 360.0,
])
if inside_0_360:
points = cube_coord.core_points()
min_, max_ = dask.compute(points.min(), points.max())

# Do not apply fixes when values are inside of valid range [0, 360]
if min_ >= 0.0 and max_ <= 360.0:
return (cube, cube_coord)

# Cannot fix longitudes outside [-360, 720]
if np.any(cube_coord.core_points() < -360.0):
if min_ < -360.0:
return (cube, cube_coord)
if np.any(cube_coord.core_points() > 720.0):
if max_ > 720.0:
return (cube, cube_coord)

# cube.intersection only works for cells with 0 or 2 bounds
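
The ``dask.compute`` change above evaluates both longitude reductions in a
single pass over the (possibly lazy) coordinate points instead of triggering
two separate computations. A minimal standalone sketch of that pattern, with a
made-up array standing in for ``cube_coord.core_points()``:

.. code-block:: python

    import dask
    import dask.array as da

    # Stand-in for lazy coordinate points, e.g. cube_coord.core_points().
    points = da.from_array([-25.0, 0.0, 90.0, 400.0], chunks=2)

    # One dask.compute call evaluates both reductions over a shared graph,
    # rather than computing the array twice.
    min_, max_ = dask.compute(points.min(), points.max())
    print(min_, max_)  # -25.0 400.0
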
53 changes: 35 additions & 18 deletions esmvalcore/cmor/check.py
@@ -3,17 +3,20 @@

import logging
import warnings
from collections import namedtuple
from collections.abc import Callable
from enum import IntEnum
from functools import cached_property
from typing import Optional

import cf_units
import dask
import iris.coord_categorisation
import iris.coords
import iris.exceptions
import iris.util
import numpy as np
from iris.coords import Coord
from iris.cube import Cube

from esmvalcore.cmor._fixes.fix import GenericFix
Expand All @@ -24,7 +27,7 @@
_get_simplified_calendar,
_is_unstructured_grid,
)
from esmvalcore.cmor.table import get_var_info
from esmvalcore.cmor.table import CoordinateInfo, get_var_info
from esmvalcore.exceptions import ESMValCoreDeprecationWarning


@@ -500,6 +503,7 @@ def _check_alternative_dim_names(self, key):

def _check_coords(self):
"""Check coordinates."""
coords = []
for coordinate in self._cmor_var.coordinates.values():
# Cannot check generic_level coords with no CMOR information
if coordinate.generic_level and not coordinate.out_name:
@@ -513,6 +517,36 @@ def _check_coords(self):
continue

self._check_coord(coordinate, coord, var_name)
coords.append((coordinate, coord))

self._check_coord_ranges(coords)

def _check_coord_ranges(self, coords: list[tuple[CoordinateInfo, Coord]]):
"""Check coordinate value are inside valid ranges."""
Limit = namedtuple('Limit', ['name', 'type', 'limit', 'value'])

limits = []
for coord_info, coord in coords:
points = coord.core_points()
for limit_type in 'min', 'max':
valid = getattr(coord_info, f'valid_{limit_type}')
if valid != "":
limit = Limit(
name=coord_info.out_name,
type=limit_type,
limit=float(valid),
value=getattr(points, limit_type)(),
)
limits.append(limit)

limits = dask.compute(*limits)
for limit in limits:
if limit.type == 'min' and limit.value < limit.limit:
self.report_critical(self._vals_msg, limit.name,
'< valid_min =', limit.limit)
if limit.type == 'max' and limit.value > limit.limit:
self.report_critical(self._vals_msg, limit.name,
'> valid_max =', limit.limit)

def _check_coords_data(self):
"""Check coordinate data."""
@@ -593,24 +627,7 @@ def _check_coord_monotonicity_and_direction(self, cmor, coord, var_name):

def _check_coord_points(self, coord_info, coord, var_name):
"""Check coordinate points: values, bounds and monotonicity."""
# Check requested coordinate values exist in coord.points
self._check_requested_values(coord, coord_info, var_name)

# Check coordinate value ranges
if coord_info.valid_min:
valid_min = float(coord_info.valid_min)
if np.any(coord.core_points() < valid_min):
self.report_critical(self._vals_msg, var_name,
'< {} ='.format('valid_min'),
valid_min)

if coord_info.valid_max:
valid_max = float(coord_info.valid_max)
if np.any(coord.core_points() > valid_max):
self.report_critical(self._vals_msg, var_name,
'> {} ='.format('valid_max'),
valid_max)

self._check_coord_bounds(coord_info, coord, var_name)
self._check_coord_monotonicity_and_direction(coord_info, coord,
var_name)
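
The new ``_check_coord_ranges`` method above follows the same idea as the
``fix.py`` change: gather the lazy ``min``/``max`` reductions for all
coordinates first, evaluate them with a single ``dask.compute`` call, and only
then compare against the CMOR ``valid_min``/``valid_max`` limits. A small
standalone sketch of that pattern with made-up coordinate data (not the
ESMValCore API):

.. code-block:: python

    from collections import namedtuple

    import dask
    import dask.array as da

    Limit = namedtuple('Limit', ['name', 'type', 'limit', 'value'])

    # Made-up lazy coordinate points and their (valid_min, valid_max) limits.
    coords = {
        'lat': (da.from_array([-95.0, 0.0, 90.0], chunks=2), (-90.0, 90.0)),
        'lon': (da.from_array([0.0, 180.0, 359.5], chunks=2), (0.0, 360.0)),
    }

    limits = []
    for name, (points, (valid_min, valid_max)) in coords.items():
        limits.append(Limit(name, 'min', valid_min, points.min()))
        limits.append(Limit(name, 'max', valid_max, points.max()))

    # A single compute call triggers every pending reduction at once.
    limits = dask.compute(*limits)
    for limit in limits:
        if limit.type == 'min' and limit.value < limit.limit:
            print(f'{limit.name}: {limit.value} < valid_min = {limit.limit}')
        if limit.type == 'max' and limit.value > limit.limit:
            print(f'{limit.name}: {limit.value} > valid_max = {limit.limit}')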