Let algorithms extend dimensions (for impurities)
tbody-cfs committed Dec 1, 2023
1 parent 338e68a commit 28d842b
Showing 9 changed files with 37 additions and 50 deletions.
38 changes: 7 additions & 31 deletions cfspopcon/algorithms/algorithm_class.py
@@ -81,21 +81,15 @@ def run(**kwargs: Any) -> xr.Dataset:

return run

def update_dataset(self, dataset: xr.Dataset, in_place: bool = False) -> Optional[xr.Dataset]:
def update_dataset(self, dataset: xr.Dataset, allow_overwrite: bool = True) -> xr.Dataset:
"""Retrieve inputs from passed dataset and return a new dataset combining input and output quantities.
Specifying in_place=True modifies the dataset in place (changing the input), whereas in_place=False will
return a copy of the dataset with the outputs appended.
Args:
dataset: input dataset
in_place: modify the dataset in place, otherwise return a modified dataset keeping the input unchanged.
allow_overwrite: if False, raise an error if trying to write a variable which is already defined in dataset
Returns: modified dataset
"""
if not in_place:
dataset = dataset.copy(deep=True)

input_values = {}
for key in self.input_keys:
if key in dataset.keys():
@@ -108,14 +102,7 @@ def update_dataset(self, dataset: xr.Dataset, in_place: bool = False) -> Optional[xr.Dataset]:
raise KeyError(f"Key '{key}' not in dataset keys [{sorted_dataset_keys}] or default values [{sorted_default_keys}]")

result = self._function(**input_values)

for key, val in result.items():
dataset[key] = val

if not in_place:
return dataset
else:
return None
return xr.Dataset(result).merge(dataset, join="left", compat=("override" if allow_overwrite else "no_conflicts"))

def __add__(self, other: Union[Algorithm, CompositeAlgorithm]) -> CompositeAlgorithm:
"""Build a CompositeAlgorithm composed of this Algorithm and another Algorithm or CompositeAlgorithm."""
@@ -277,32 +264,21 @@ def _run(self, **kwargs: Any) -> xr.Dataset:

return xr.Dataset(result)

def update_dataset(self, dataset: xr.Dataset, in_place: bool = False) -> Optional[xr.Dataset]:
def update_dataset(self, dataset: xr.Dataset, allow_overwrite: bool = True) -> xr.Dataset:
"""Retrieve inputs from passed dataset and return a new dataset combining input and output quantities.
Specifying in_place=True modifies the dataset in place (changing the input), whereas in_place=False will
return a copy of the dataset with the outputs appended.
N.b. will not throw a warning if the dataset contains unused elements.
Args:
dataset: input dataset
in_place: modify the dataset in place, otherwise return a modified dataset keeping the input unchanged.
allow_overwrite: if False, raise an error if trying to write a variable which is already defined in dataset
Returns: modified dataset
"""
if not in_place:
dataset = dataset.copy(deep=True)

for alg in self.algorithms:
# We've already used copy on the dataset, so can now call update_dataset with
# in_place = True for each of the algorithms.
alg.update_dataset(dataset, in_place=True)
dataset = alg.update_dataset(dataset, allow_overwrite=allow_overwrite)

if not in_place:
return dataset
else:
return None
return dataset

def __add__(self, other: Union[Algorithm, CompositeAlgorithm]) -> CompositeAlgorithm:
"""Build a CompositeAlgorithm composed of this CompositeAlgorithm and another Algorithm or CompositeAlgorithm."""
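The change above is the core of this commit's API shift: update_dataset no longer mutates the input dataset in place, it always returns a new dataset built by merging the algorithm's outputs back into the inputs. A minimal sketch of that merge behaviour (toy variable names, not actual cfspopcon quantities):

import xarray as xr

# Toy stand-ins for a POPCON input dataset and for the dict returned by an
# Algorithm's wrapped function.
inputs = xr.Dataset({"major_radius": 1.85, "magnetic_field_on_axis": 12.2})
result = {"plasma_volume": 19.0}

allow_overwrite = True
# join="left" keeps the coordinates of the result; compat="override" lets the
# outputs silently replace variables already present in the inputs, whereas
# "no_conflicts" raises xr.MergeError if a variable would change value.
updated = xr.Dataset(result).merge(
    inputs,
    join="left",
    compat=("override" if allow_overwrite else "no_conflicts"),
)

print(sorted(updated.data_vars))  # inputs itself is left untouched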
3 changes: 2 additions & 1 deletion cfspopcon/algorithms/edge_impurity_concentration.py
@@ -1,12 +1,13 @@
"""Run the two point model with a fixed sheath entrance temperature."""

import xarray as xr

from ..atomic_data import read_atomic_data
from ..formulas.scrape_off_layer_model import build_L_int_integrator, calc_required_edge_impurity_concentration
from ..helpers import extend_impurities_array
from ..named_options import Impurity
from ..unit_handling import Unitfull, convert_to_default_units, ureg
from .algorithm_class import Algorithm
from ..helpers import extend_impurities_array

RETURN_KEYS = [
"edge_impurity_concentration",
2 changes: 1 addition & 1 deletion cfspopcon/cli.py
@@ -56,7 +56,7 @@ def run_popcon(case: str, plots: tuple[str], show: bool) -> None:
dataset = xr.Dataset(input_parameters)

algorithm.validate_inputs(dataset)
algorithm.update_dataset(dataset, in_place=True)
dataset = algorithm.update_dataset(dataset)

output_dir = Path(case) / "output" if Path(case).is_dir() else Path(case).parent / "output"
output_dir.mkdir(exist_ok=True)
Changed file in cfspopcon/formulas/scrape_off_layer_model (file name not shown)
@@ -3,7 +3,7 @@

import numpy as np
import xarray as xr
from scipy.interpolate import InterpolatedUnivariateSpline
from scipy.interpolate import InterpolatedUnivariateSpline # type:ignore[import-untyped]

from ...named_options import Impurity
from ...unit_handling import Unitfull, convert_units, magnitude, ureg, wraps_ufunc
@@ -42,12 +42,11 @@ def build_L_int_integrator(
Lz_sqrt_Te = Lz_curve * np.sqrt(electron_temp)

interpolator = InterpolatedUnivariateSpline(electron_temp, magnitude(Lz_sqrt_Te))
def L_int(start_temp, stop_temp):
return interpolator.integral(start_temp, stop_temp)

return wraps_ufunc(
input_units=dict(start_temp=ureg.eV, stop_temp=ureg.eV), return_units=dict(L_int=ureg.W * ureg.m**3 * ureg.eV**1.5)
)(L_int)
def L_int(start_temp: float, stop_temp: float) -> float:
return interpolator.integral(start_temp, stop_temp) # type: ignore[no-any-return]

return wraps_ufunc(input_units=dict(start_temp=ureg.eV, stop_temp=ureg.eV), return_units=dict(L_int=ureg.W * ureg.m**3 * ureg.eV**1.5))(L_int) # type: ignore[no-any-return]


def calc_required_edge_impurity_concentration(
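For readers unfamiliar with the pattern in build_L_int_integrator above: the Lz * sqrt(Te) curve is fitted with a spline, and the returned callable evaluates its definite integral between two temperatures. A self-contained sketch with made-up data, leaving out cfspopcon's wraps_ufunc unit handling:

import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline

# Toy temperature grid [eV] and a made-up stand-in for Lz * sqrt(Te); in
# cfspopcon these come from the atomic data curves.
electron_temp = np.linspace(1.0, 100.0, 200)
Lz_sqrt_Te = np.sqrt(electron_temp) * 1e-33

interpolator = InterpolatedUnivariateSpline(electron_temp, Lz_sqrt_Te)

def L_int(start_temp: float, stop_temp: float) -> float:
    # InterpolatedUnivariateSpline.integral evaluates the definite integral
    # of the fitted spline between the two limits.
    return float(interpolator.integral(start_temp, stop_temp))

print(L_int(5.0, 50.0))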
4 changes: 2 additions & 2 deletions cfspopcon/helpers.py
@@ -95,7 +95,7 @@ def extend_impurities_array(array: xr.DataArray, species: Union[str, Impurity],
"""
if isinstance(species, xr.DataArray):
species = species.item()

if not isinstance(species, Impurity):
species = Impurity[species.capitalize()]

@@ -107,4 +107,4 @@ def extend_impurities_array(array: xr.DataArray, species: Union[str, Impurity],
return concentration
else:
other_species = array.sel(dim_species=[s for s in array.dim_species if s != species])
return xr.concat((other_species, concentration), dim="dim_species")
return xr.concat((other_species, concentration), dim="dim_species").sortby("dim_species")
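The new .sortby("dim_species") call is what motivates the comparison operators added to Impurity in the next file: sorting the coordinate compares the species objects with '<' and '>'. A toy illustration with a hypothetical stand-in enum (not cfspopcon code):

from enum import Enum

import xarray as xr


class Species(Enum):
    # Hypothetical stand-in for cfspopcon.named_options.Impurity.
    Helium = 2
    Oxygen = 8
    Tungsten = 74

    def __lt__(self, other: "Species") -> bool:
        return self.value < other.value

    def __gt__(self, other: "Species") -> bool:
        return self.value > other.value


existing = xr.DataArray(
    [0.06, 1.5e-05],
    dims="dim_species",
    coords={"dim_species": [Species.Helium, Species.Tungsten]},
)
new_entry = xr.DataArray(
    [0.0031],
    dims="dim_species",
    coords={"dim_species": [Species.Oxygen]},
)

combined = xr.concat((existing, new_entry), dim="dim_species").sortby("dim_species")
print([s.name for s in combined.dim_species.values])  # ['Helium', 'Oxygen', 'Tungsten']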
8 changes: 8 additions & 0 deletions cfspopcon/named_options.py
@@ -88,6 +88,14 @@ class Impurity(Enum):
Xenon = 54
Tungsten = 74

def __lt__(self, other: "Impurity") -> bool:
"""Implements '<' to allow sorting."""
return self.value < other.value

def __gt__(self, other: "Impurity") -> bool:
"""Implements '>' to allow sorting."""
return self.value > other.value


class ConfinementScaling(Enum):
r"""Enum of implemented \tau_{E} scalings."""
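With __lt__ and __gt__ defined, Impurity members order by their value, i.e. by atomic number, so plain Python sorting works as well. A quick check, assuming cfspopcon is importable:

from cfspopcon.named_options import Impurity

# Sorted by atomic number: Helium (2), then Oxygen (8), then Tungsten (74).
print(sorted([Impurity.Tungsten, Impurity.Helium, Impurity.Oxygen]))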
2 changes: 1 addition & 1 deletion docs/doc_sources/getting_started.ipynb
@@ -1024,7 +1024,7 @@
}
],
"source": [
"algorithm.update_dataset(dataset, in_place=True)\n",
"dataset = algorithm.update_dataset(dataset)\n",
"\n",
"dataset"
]
19 changes: 11 additions & 8 deletions tests/regression_results/PRD.json
@@ -14,9 +14,10 @@
"dim_species": {
"attrs": {},
"data": [
"Tungsten",
"Helium",
"Oxygen"
"Oxygen",
"Argon",
"Tungsten"
],
"dims": [
"dim_0"
@@ -598,9 +599,10 @@
"units": "dimensionless"
},
"data": [
1.5e-05,
0.06,
0.0031
0.0031,
0.016889895535634417,
1.5e-05
],
"dims": [
"dim_species"
@@ -611,9 +613,10 @@
"units": "dimensionless"
},
"data": [
58.37812639204965,
1.9999999473503665,
7.999453250480196
7.999453250480196,
NaN,
58.37812639204965
],
"dims": [
"dim_species"
@@ -1134,8 +1137,8 @@
}
},
"dims": {
"dim_0": 3,
"dim_0": 4,
"dim_rho": 50,
"dim_species": 3
"dim_species": 4
}
}
Binary file modified tests/regression_results/SPARC_PRD_result.nc
