Skip to content

Commit

Permalink
Changed behaviour to catch 'no valid dimension coordinates' error and…
Browse files Browse the repository at this point in the history
… return first dataset if so — necessary for indexing static files

This appears to be an intermittent issue seen in the Read the Docs build —
we were unable to reproduce it reliably locally.
  • Loading branch information
charles-turner-1 committed Oct 16, 2024
1 parent 2ce3446 commit 6209c8d
Show file tree
Hide file tree
Showing 2 changed files with 48 additions and 29 deletions.
30 changes: 26 additions & 4 deletions intake_esm/source.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import typing
import warnings

import dask
import fsspec
Expand All @@ -11,6 +12,10 @@
from .utils import OPTIONS


class ConcatenationWarning(UserWarning):
    """Emitted when datasets cannot be combined and only one is retained.

    NOTE(review): raised in place of failing outright when
    ``xr.combine_by_coords`` finds no dimension coordinates — confirm
    against the caller in ``_open_dataset``.
    """


class ESMDataSourceError(Exception):
    """Package-specific exception raised by ESM data sources."""

Expand Down Expand Up @@ -87,10 +92,11 @@ def _open_dataset(

variable_intersection = set(requested_variables).intersection(set(varname))

data_vars = [variable for variable in variable_intersection if variable in ds.data_vars]
coord_vars = [variable for variable in variable_intersection if variable in ds.coords]
data_vars = variable_intersection & set(ds.data_vars)
coord_vars = variable_intersection & set(ds.coords)

variables = list(data_vars | coord_vars)

variables = [*data_vars, *coord_vars]
scalar_variables = [v for v in ds.data_vars if len(ds[v].dims) == 0]

ds = ds.set_coords(scalar_variables)
Expand Down Expand Up @@ -262,7 +268,23 @@ def _open_dataset(self):
ds.set_coords(set(ds.variables) - set(ds.attrs[OPTIONS['vars_key']]))
for ds in datasets
]
self._ds = xr.combine_by_coords(datasets, **self.xarray_combine_by_coords_kwargs)
try:
self._ds = xr.combine_by_coords(
datasets, **self.xarray_combine_by_coords_kwargs
)
except ValueError as exc:
if (
str(exc)
== 'Could not find any dimension coordinates to use to order the datasets for concatenation'
):
warnings.warn(
'Attempting to concatenate datasets without valid dimension coordinates: retaining only first dataset.'
' Request valid dimension coordinate to silence this warning.',
category=ConcatenationWarning,
)
self._ds = datasets[0]
else:
raise exc

self._ds.attrs[OPTIONS['dataset_key']] = self.key

Expand Down
47 changes: 22 additions & 25 deletions tests/test_source.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,41 +107,38 @@ def test_update_attrs(tmp_path, data_format, attrs):
@pytest.mark.parametrize(
'fpath,dvars,cvars,expected',
[
(f1, ['time_bnds'], [''], ['time_bnds', 'time', 'height']),
(f1, ['tasmax'], [''], ['time', 'tasmax', 'height', 'lat', 'lon']),
(f1, [], ['height'], ['height']),
(
f1,
['time_bnds'],
[''],
['time_bnds', 'height', 'time'],
),
(f1, ['tasmax'], [''], ['tasmax', 'height', 'time', 'lat', 'lon']),
(
f1,
[],
['height'],
['height'],
),
(
f1,
[],
[],
[
'time',
'lat',
'lon',
'height',
'time_bnds',
'lon_bnds',
'lat_bnds',
'tasmax',
],
['height', 'time_bnds', 'lon_bnds', 'lat_bnds', 'tasmax', 'time', 'lat', 'lon'],
),
(multi_path, ['time_bnds'], [''], ['time_bnds', 'height', 'time']),
(
multi_path,
['tasmax'],
[''],
['tasmax', 'time', 'height', 'lat', 'lon'],
),
(multi_path, ['time_bnds'], [''], ['time_bnds', 'time', 'height']),
(multi_path, ['tasmax'], [''], ['time', 'tasmax', 'height', 'lat', 'lon']),
(multi_path, [], ['height'], ['height']),
(
multi_path,
[],
[],
[
'time',
'lat',
'lon',
'height',
'time_bnds',
'lon_bnds',
'lat_bnds',
'tasmax',
],
['time_bnds', 'lon_bnds', 'lat_bnds', 'tasmax', 'time', 'height', 'lat', 'lon'],
),
],
)
Expand Down

0 comments on commit 6209c8d

Please sign in to comment.