Skip to content

Commit

Permalink
Merge branch 'SciTools:main' into bugfix-shape-masking
Browse files Browse the repository at this point in the history
  • Loading branch information
hsteptoe authored Feb 7, 2025
2 parents dc090a5 + a65aaea commit 5b1054b
Show file tree
Hide file tree
Showing 70 changed files with 958 additions and 839 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci-manifest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,4 @@ concurrency:
jobs:
manifest:
name: "check-manifest"
uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.5
uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.5
2 changes: 1 addition & 1 deletion .github/workflows/refresh-lockfiles.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,5 @@ on:

jobs:
refresh_lockfiles:
uses: scitools/workflows/.github/workflows/[email protected].0
uses: scitools/workflows/.github/workflows/[email protected].5
secrets: inherit
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ repos:
- id: no-commit-to-branch

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.8.4"
rev: "v0.9.4"
hooks:
- id: ruff
types: [file, python]
Expand All @@ -38,7 +38,7 @@ repos:
types: [file, python]

- repo: https://github.com/codespell-project/codespell
rev: "v2.3.0"
rev: "v2.4.1"
hooks:
- id: codespell
types_or: [asciidoc, python, markdown, rst]
Expand All @@ -63,7 +63,7 @@ repos:
types: [file, python]

- repo: https://github.com/pre-commit/mirrors-mypy
rev: 'v1.14.0'
rev: 'v1.14.1'
hooks:
- id: mypy
additional_dependencies:
Expand Down
52 changes: 52 additions & 0 deletions benchmarks/benchmarks/load/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,58 @@ def time_many_var_load(self) -> None:
_ = load(str(self.FILE_PATH))


class ManyCubes:
    # Benchmark fixture: measures load time for a NetCDF file containing many
    # (100) small cubes that share identical horizontal coordinates.
    # The data file is generated once (in a separate process) and cached.
    FILE_PATH = BENCHMARK_DATA / "many_cube_file.nc"

    @staticmethod
    def _create_file(save_path: str) -> None:
        """Run externally - everything must be self-contained."""
        # NOTE: imports are deliberately local — this function is executed in
        # a separate interpreter (see setup_cache), so it must not rely on
        # any names from this module's scope.
        import numpy as np

        from iris import save
        from iris.coords import AuxCoord, DimCoord
        from iris.cube import Cube, CubeList

        data_len = 81920
        bnds_len = 3
        # 1-D float32 payload, also reused as the latitude/longitude points.
        data = np.arange(data_len).astype(np.float32)
        # Matching (data_len, bnds_len) bounds array for both aux coords.
        bnds_data = (
            np.arange(data_len * bnds_len)
            .astype(np.float32)
            .reshape(data_len, bnds_len)
        )
        time = DimCoord(np.array([0]), standard_name="time")
        lat = AuxCoord(
            data, bounds=bnds_data, standard_name="latitude", units="degrees"
        )
        lon = AuxCoord(
            data, bounds=bnds_data, standard_name="longitude", units="degrees"
        )
        # Shape (1, data_len): time on dim 0, lat/lon attached along dim 1.
        cube = Cube(data.reshape(1, -1), units="unknown")
        cube.add_dim_coord(time, 0)
        cube.add_aux_coord(lat, 1)
        cube.add_aux_coord(lon, 1)

        # Copy the template cube 100 times, renaming each copy so they save
        # as distinct variables in the one NetCDF file.
        n_cubes = 100
        cubes = CubeList()
        for i in range(n_cubes):
            cube = cube.copy()
            cube.long_name = f"var_{i}"
            cubes.append(cube)
        save(cubes, save_path)

    def setup_cache(self) -> None:
        # Only (re)generate the benchmark file when data reuse is disabled or
        # the cached file is missing.
        if not REUSE_DATA or not self.FILE_PATH.is_file():
            # See :mod:`benchmarks.generate_data` docstring for full explanation.
            _ = run_function_elsewhere(
                self._create_file,
                str(self.FILE_PATH),
            )

    def time_many_cube_load(self) -> None:
        # The timed benchmark: load the many-cube file.
        _ = load(str(self.FILE_PATH))


class StructuredFF:
"""Test structured loading of a large-ish fieldsfile.
Expand Down
2 changes: 1 addition & 1 deletion docs/src/community/iris_xarray.rst
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ output is not fully CF compliant (as-per `the cf checker <https://cfchecker.ncas
approach in Iris, and means that the use of the "coordinates" attribute in output is
often not CF compliant.
* dates are converted to datetime-like objects internally. There are special features
providing `support for non-standard calendars <https://docs.xarray.dev/en/stable/user-guide/weather-climate.html#non-standard-calendars-and-dates-outside-the-nanosecond-precision-range>`_,
providing `support for non-standard calendars <https://docs.xarray.dev/en/stable/user-guide/weather-climate.html#non-standard-calendars-and-dates-outside-the-precision-range>`_,
however date units may not always be saved correctly.
* CF-style coordinate bounds variables are not fully understood. The CF approach
where bounds variables do not usually define their units or standard_names can cause
Expand Down
2 changes: 1 addition & 1 deletion docs/src/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def _dotv(version):
.. |python_version| replace:: {build_python_version}
.. |python_support| replace:: {python_support}
.. |iris_version| replace:: v{version}
.. |build_date| replace:: ({datetime.datetime.now().strftime('%d %b %Y')})
.. |build_date| replace:: ({datetime.datetime.now().strftime("%d %b %Y")})
"""

# Add any Sphinx extension module names here, as strings. They can be
Expand Down
2 changes: 1 addition & 1 deletion docs/src/further_topics/ugrid/other_meshes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -360,5 +360,5 @@ dimensions into a single mesh dimension. Since Iris cubes don't support a "resh
.. _WAVEWATCH III: https://github.com/NOAA-EMC/WW3
.. _FESOM 1.4: https://www.fesom.de/models/fesom14/
.. _FESOM 1.4: https://fesom.de/models/fesom14/
.. _NEMO: https://www.nemo-ocean.eu/
70 changes: 15 additions & 55 deletions docs/src/userguide/interpolation_and_regridding.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@ The following are the regridding schemes that are currently available in Iris:
* point in cell regridding (:class:`iris.analysis.PointInCell`) and
* area-weighted regridding (:class:`iris.analysis.AreaWeighted`, first-order conservative).

The linear, nearest-neighbor, and area-weighted regridding schemes support
lazy regridding, i.e. if the source cube has lazy data, the resulting cube
will also have lazy data.
The linear and nearest-neighbour interpolation schemes, and the linear, nearest-neighbour,
and area-weighted regridding schemes support lazy regridding, i.e. if the source cube has lazy data,
the resulting cube will also have lazy data.
See :doc:`real_and_lazy_data` for an introduction to lazy data.
See :doc:`../further_topics/which_regridder_to_use` for a more in depth overview of the different regridders.

Expand Down Expand Up @@ -194,46 +194,6 @@ For example, to mask values that lie beyond the range of the original data:
[-- 494.44451904296875 588.888916015625 683.333251953125 777.77783203125
872.2222290039062 966.666748046875 1061.111083984375 1155.555419921875 --]


.. _caching_an_interpolator:

Caching an Interpolator
^^^^^^^^^^^^^^^^^^^^^^^

If you need to interpolate a cube on multiple sets of sample points you can
'cache' an interpolator to be used for each of these interpolations. This can
shorten the execution time of your code as the most computationally
intensive part of an interpolation is setting up the interpolator.

To cache an interpolator you must set up an interpolator scheme and call the
scheme's interpolator method. The interpolator method takes as arguments:

#. a cube to be interpolated, and
#. an iterable of coordinate names or coordinate instances of the coordinates that are to be interpolated over.

For example:

>>> air_temp = iris.load_cube(iris.sample_data_path('air_temp.pp'))
>>> interpolator = iris.analysis.Nearest().interpolator(air_temp, ['latitude', 'longitude'])

When this cached interpolator is called you must pass it an iterable of sample points
that have the same form as the iterable of coordinates passed to the constructor.
So, to use the cached interpolator defined above:

>>> latitudes = np.linspace(48, 60, 13)
>>> longitudes = np.linspace(-11, 2, 14)
>>> for lat, lon in zip(latitudes, longitudes):
... result = interpolator([lat, lon])

In each case ``result`` will be a cube interpolated from the ``air_temp`` cube we
passed to interpolator.

Note that you must specify the required extrapolation mode when setting up the cached interpolator.
For example::

>>> interpolator = iris.analysis.Nearest(extrapolation_mode='nan').interpolator(cube, coords)


.. _regridding:

Regridding
Expand Down Expand Up @@ -417,24 +377,24 @@ In each case ``result`` will be the input cube regridded to the grid defined by
the target grid cube (in this case ``rotated_psl``) that we used to define the
cached regridder.

Regridding Lazy Data
^^^^^^^^^^^^^^^^^^^^
Interpolating and Regridding Lazy Data
--------------------------------------

If you are working with large cubes, especially when you are regridding to a
high resolution target grid, you may run out of memory when trying to
regrid a cube. When this happens, make sure the input cube has lazy data
If you are working with large cubes, you may run out of memory when trying to
interpolate or regrid a cube. For instance, this might happen when regridding to a
high resolution target grid. When this happens, make sure the input cube has lazy data

>>> air_temp = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))
>>> air_temp
<iris 'Cube' of air_temperature / (K) (time: 240; latitude: 37; longitude: 49)>
>>> air_temp.has_lazy_data()
True

and the regridding scheme supports lazy data. All regridding schemes described
here support lazy data. If you still run out of memory even while using lazy
data, inspect the
`chunks <https://docs.dask.org/en/latest/array-chunks.html>`__
:
and the interpolation or regridding scheme supports lazy data. All interpolation and
regridding schemes described here with exception of :class:`iris.analysis.PointInCell`
(point-in-cell regridder) and :class:`iris.analysis.UnstructuredNearest` (nearest-neighbour
regridder) support lazy data. If you still run out of memory even while using lazy data,
inspect the `chunks <https://docs.dask.org/en/latest/array-chunks.html>`__ :

>>> air_temp.lazy_data().chunks
((240,), (37,), (49,))
Expand All @@ -455,6 +415,6 @@ dimension, to regrid it in 8 chunks of 30 timesteps at a time:
Assuming that Dask is configured such that it processes only a few chunks of
the data array at a time, this will further reduce memory use.

Note that chunking in the horizontal dimensions is not supported by the
regridding schemes. Chunks in these dimensions will automatically be combined
Note that chunking in the horizontal dimensions is not supported by the interpolation
and regridding schemes. Chunks in these dimensions will automatically be combined
before regridding.
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Addind a citation for a plot using iris.plot.citation()."""
"""Adding a citation for a plot using iris.plot.citation()."""

import matplotlib.pyplot as plt

Expand Down
29 changes: 24 additions & 5 deletions docs/src/whatsnew/latest.rst
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,16 @@ This document explains the changes made to Iris for this release
(:issue:`6248`, :pull:`6257`)


#. `@fnattino`_ added the lazy median aggregator :class:`iris.analysis.MEDIAN`
based on the implementation discussed by `@rcomer`_ and `@stefsmeets`_ in
:issue:`4039` (:pull:`6167`).


🐛 Bugs Fixed
=============

#. N/A
#. `@rcomer`_ added handling for string stash codes when saving pp files.
(:issue:`6239`, :pull:`6289`)


💣 Incompatible Changes
Expand All @@ -55,7 +60,15 @@ This document explains the changes made to Iris for this release
🚀 Performance Enhancements
===========================

#. N/A
#. `@bouweandela`_ made loading :class:`~iris.cube.Cube`s from small NetCDF
files faster. (:pull:`6229`)

#. `@fnattino`_ enabled lazy cube interpolation using the linear and
nearest-neighbour interpolators (:class:`iris.analysis.Linear` and
:class:`iris.analysis.Nearest`). Note that this implementation removes
performance benefits linked to caching an interpolator object. While this does
not break previously suggested code (instantiating and re-using an interpolator
object remains possible), this is no longer an advertised feature. (:pull:`6084`)


🔥 Deprecations
Expand Down Expand Up @@ -93,13 +106,19 @@ This document explains the changes made to Iris for this release
:doc:`/developers_guide/release_do_nothing` to be more thorough and apply
lessons learned from recent releases. (:pull:`6062`)

#. `@schlunma`_ made lazy [smart
weights](https://github.com/SciTools/iris/pull/5084) used for cube
aggregations have the same chunks as their parent cube if broadcasting is
necessary. (:issue:`6285`, :pull:`6288`)


.. comment
Whatsnew author names (@github name) in alphabetical order. Note that,
core dev names are automatically included by the common_links.inc:
.. _@fnattino: https://github.com/fnattino
.. _@jrackham-mo: https://github.com/jrackham-mo
.. _@stefsmeets: https://github.com/stefsmeets

.. comment
Whatsnew resources in alphabetical order:
Whatsnew resources in alphabetical order:
4 changes: 2 additions & 2 deletions lib/iris/_representation/cube_printout.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def add_row(self, cols, aligns, i_col_unlimited=None):
"""
n_cols = len(cols)
if len(aligns) != n_cols:
msg = f"Number of aligns ({len(aligns)})" f" != number of cols ({n_cols})"
msg = f"Number of aligns ({len(aligns)}) != number of cols ({n_cols})"
raise ValueError(msg)
if self.n_columns is not None:
# For now, all rows must have same number of columns
Expand Down Expand Up @@ -106,7 +106,7 @@ def formatted_as_strings(self):
elif align == "right":
col_text = col.rjust(width)
else:
msg = f'Unknown alignment "{align}" ' 'not in ("left", "right")'
msg = f'Unknown alignment "{align}" not in ("left", "right")'
raise ValueError(msg)
col_texts.append(col_text)

Expand Down
33 changes: 24 additions & 9 deletions lib/iris/analysis/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1198,10 +1198,15 @@ def __init__(self, weights, cube):
dim_metadata = cube._dimensional_metadata(weights)
derived_array = dim_metadata._core_values()
if dim_metadata.shape != cube.shape:
if isinstance(derived_array, da.Array):
chunks = cube.lazy_data().chunks
else:
chunks = None
derived_array = iris.util.broadcast_to_shape(
derived_array,
cube.shape,
dim_metadata.cube_dims(cube),
chunks=chunks,
)
derived_units = dim_metadata.units

Expand Down Expand Up @@ -1612,6 +1617,19 @@ def _lazy_max_run(array, axis=-1, **kwargs):
return result


def _lazy_median(data, axis=None, **kwargs):
    """Calculate the lazy median, with support for masked arrays.

    Parameters
    ----------
    data :
        Lazy (dask) array, possibly masked, to take the median of.
    axis : int, iterable of int, or None, optional
        Axis or axes along which to compute the median.  ``None`` (the
        default) means reduce over all axes.
    **kwargs :
        Additional keyword arguments, passed through to
        :func:`dask.array.nanmedian`.

    Returns
    -------
    Lazy masked array of medians; slices that were entirely masked come
    back masked.
    """
    # Dask median requires the axes to be explicitly listed.
    axis = range(data.ndim) if axis is None else axis

    # NaN can only mark missing points in a floating array, so promote
    # integer data to float before filling masked points with NaN.
    # Pass the dtype (not the array) to issubdtype, per NumPy's documented
    # signature, which expects a dtype-like argument.
    if np.issubdtype(data.dtype, np.integer):
        data = data.astype(float)
    filled = da.ma.filled(data, np.nan)
    result = da.nanmedian(filled, axis=axis, **kwargs)
    # Re-mask any NaNs in the result (e.g. from all-masked slices).
    result_masked = da.ma.fix_invalid(result)
    return result_masked


def _rms(array, axis, **kwargs):
rval = np.sqrt(ma.average(array**2, axis=axis, **kwargs))

Expand Down Expand Up @@ -1940,7 +1958,9 @@ def interp_order(length):
"""


MEDIAN = Aggregator("median", ma.median)
MEDIAN = Aggregator(
"median", ma.median, lazy_func=_build_dask_mdtol_function(_lazy_median)
)
"""
An :class:`~iris.analysis.Aggregator` instance that calculates
the median over a :class:`~iris.cube.Cube`, as computed by
Expand All @@ -1953,8 +1973,7 @@ def interp_order(length):
result = cube.collapsed('longitude', iris.analysis.MEDIAN)
This aggregator handles masked data, but NOT lazy data. For lazy aggregation,
please try :obj:`~.PERCENTILE`.
This aggregator handles masked data and lazy data.
"""

Expand Down Expand Up @@ -2673,9 +2692,7 @@ def interpolator(self, cube, coords):
the given coordinates.
Typically you should use :meth:`iris.cube.Cube.interpolate` for
interpolating a cube. There are, however, some situations when
constructing your own interpolator is preferable. These are detailed
in the :ref:`user guide <caching_an_interpolator>`.
interpolating a cube.
Parameters
----------
Expand Down Expand Up @@ -2876,9 +2893,7 @@ def interpolator(self, cube, coords):
by the dimensions of the specified coordinates.
Typically you should use :meth:`iris.cube.Cube.interpolate` for
interpolating a cube. There are, however, some situations when
constructing your own interpolator is preferable. These are detailed
in the :ref:`user guide <caching_an_interpolator>`.
interpolating a cube.
Parameters
----------
Expand Down
Loading

0 comments on commit 5b1054b

Please sign in to comment.