From d15caa1a3f89e14507cf096a01e49fad47e9fd41 Mon Sep 17 00:00:00 2001 From: Alexandru Fikl Date: Sat, 26 Oct 2024 14:43:34 +0300 Subject: [PATCH 1/5] pyproject: bump min python to 3.10 --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4557ec43e..38f1a3ef3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,13 +7,13 @@ requires = [ [project] name = "pytential" -version = "2020.2" +version = "2024.0" description = "Evaluate layer and volume potentials accurately. Solve integral equations." readme = "README.rst" authors = [ { name = "Andreas Kloeckner", email = "inform@tiker.net" }, ] -requires-python = ">=3.8" +requires-python = ">=3.10" classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", @@ -140,7 +140,7 @@ extend-ignore-re = [ ] [tool.mypy] -python_version = "3.8" +python_version = "3.10" warn_unused_ignores = true exclude = [ "pytential/symbolic/old_diffop_primitives.py", From 3fc399bc35610ecf568ef4c54c0408a5a038f142 Mon Sep 17 00:00:00 2001 From: Alexandru Fikl Date: Sat, 26 Oct 2024 15:13:49 +0300 Subject: [PATCH 2/5] ruff: fix type import errors --- pytential/collection.py | 60 ++++++++++++----------- pytential/linalg/gmres.py | 22 ++++----- pytential/linalg/proxy.py | 25 +++++----- pytential/linalg/skeletonization.py | 72 ++++++++++++++-------------- pytential/linalg/utils.py | 20 ++++---- pytential/muller.py | 13 ++--- pytential/qbx/__init__.py | 31 ++++++------ pytential/qbx/fmmlib.py | 6 +-- pytential/source.py | 17 +++---- pytential/symbolic/compiler.py | 55 +++++++++++---------- pytential/symbolic/dof_connection.py | 4 +- pytential/symbolic/dof_desc.py | 55 +++++++++++---------- pytential/symbolic/execution.py | 23 ++++----- pytential/symbolic/matrix.py | 6 +-- pytential/symbolic/pde/beltrami.py | 16 +++---- pytential/symbolic/pde/scalar.py | 10 ++-- pytential/symbolic/primitives.py | 10 ++-- pytential/target.py | 4 +- pytential/unregularized.py | 8 ++-- 19 files changed, 229 insertions(+), 228 deletions(-) diff --git a/pytential/collection.py b/pytential/collection.py index 66cd09237..ed3276316 100644 --- a/pytential/collection.py +++ b/pytential/collection.py @@ -1,3 +1,5 @@ +from __future__ import annotations + __copyright__ = """ Copyright (C) 2013 Andreas Kloeckner Copyright (C) 2018 Alexandru Fikl @@ -23,7 +25,8 @@ THE SOFTWARE. """ -from typing import Any, Dict, Hashable, Mapping, Optional, Tuple, Union +from collections.abc import Hashable, Mapping +from typing import Any import immutables @@ -37,20 +40,21 @@ from meshmode.discretization import Discretization __doc__ = """ +.. class:: AutoWhereLike + + Types accepted for ``auto_where`` arguments to aid in determining where an + expression is evaluated. + .. class:: GeometryLike Types accepted by the :class:`GeometryCollection`. .. autoclass:: GeometryCollection - .. 
autofunction:: add_geometry_to_collection """ -GeometryLike = Union[TargetBase, PotentialSource, Discretization] -AutoWhereLike = Union[ - "DOFDescriptorLike", - Tuple["DOFDescriptorLike", "DOFDescriptorLike"] - ] +GeometryLike = TargetBase | PotentialSource | Discretization +AutoWhereLike = DOFDescriptorLike | tuple[DOFDescriptorLike, DOFDescriptorLike] def _is_valid_identifier(name: str) -> bool: @@ -113,12 +117,12 @@ class GeometryCollection: """ def __init__(self, - places: Union[ - "GeometryLike", - Tuple["GeometryLike", "GeometryLike"], - Mapping[Hashable, "GeometryLike"] - ], - auto_where: Optional[AutoWhereLike] = None) -> None: + places: ( + GeometryLike + | tuple[GeometryLike, GeometryLike] + | Mapping[Hashable, GeometryLike] + ), + auto_where: AutoWhereLike | None = None) -> None: r""" :arg places: a scalar, tuple of or mapping of symbolic names to geometry objects. Supported objects are @@ -151,7 +155,7 @@ def __init__(self, elif isinstance(places, TargetBase): places_dict = {auto_target.geometry: places} auto_source = auto_target - if isinstance(places, (Discretization, PotentialSource)): + if isinstance(places, Discretization | PotentialSource): places_dict = { auto_source.geometry: places, auto_target.geometry: places @@ -169,7 +173,7 @@ def __init__(self, self.places = immutables.Map(places_dict) self.auto_where = (auto_source, auto_target) - self._caches: Dict[str, Any] = {} + self._caches: dict[str, Any] = {} # }}} @@ -193,7 +197,7 @@ def __init__(self, # check allowed types for p in self.places.values(): - if not isinstance(p, (PotentialSource, TargetBase, Discretization)): + if not isinstance(p, PotentialSource | TargetBase | Discretization): raise TypeError( "Values in 'places' must be discretization, targets " f"or layer potential sources, got '{type(p).__name__}'") @@ -282,9 +286,7 @@ def _get_qbx_discretization(self, geometry, discr_stage): # }}} - def get_connection(self, - from_dd: "DOFDescriptorLike", - to_dd: "DOFDescriptorLike"): + def get_connection(self, from_dd: DOFDescriptorLike, to_dd: DOFDescriptorLike): """Construct a connection from *from_dd* to *to_dd* geometries. :returns: an object compatible with the @@ -297,8 +299,8 @@ def get_connection(self, def get_discretization( self, geometry: Hashable, - discr_stage: Optional["DiscretizationStages"] = None - ) -> "GeometryLike": + discr_stage: DiscretizationStages | None = None + ) -> GeometryLike: """Get the geometry or discretization in the collection. If a specific QBX stage discretization is requested, refinement is @@ -327,7 +329,7 @@ def get_discretization( else: return discr - def get_geometry(self, geometry: Hashable) -> "GeometryLike": + def get_geometry(self, geometry: Hashable) -> GeometryLike: """ :arg geometry: the identifier of the geometry in the collection. 
""" @@ -339,9 +341,9 @@ def get_geometry(self, geometry: Hashable) -> "GeometryLike": def copy( self, - places: Optional[Mapping[Hashable, "GeometryLike"]] = None, - auto_where: Optional[AutoWhereLike] = None, - ) -> "GeometryCollection": + places: Mapping[Hashable, GeometryLike] | None = None, + auto_where: AutoWhereLike | None = None, + ) -> GeometryCollection: """Get a shallow copy of the geometry collection.""" return type(self)( places=self.places if places is None else places, @@ -349,8 +351,8 @@ def copy( def merge( self, - places: Union["GeometryCollection", Mapping[Hashable, "GeometryLike"]], - ) -> "GeometryCollection": + places: GeometryCollection | Mapping[Hashable, GeometryLike], + ) -> GeometryCollection: """Merges two geometry collections and returns the new collection. :arg places: a mapping or :class:`GeometryCollection` to @@ -380,7 +382,7 @@ def __str__(self): def add_geometry_to_collection( places: GeometryCollection, - geometries: Mapping[Hashable, "GeometryLike"]) -> GeometryCollection: + geometries: Mapping[Hashable, GeometryLike]) -> GeometryCollection: """Adds a mapping of geometries to an existing collection. This function is similar to :meth:`GeometryCollection.merge`, but it makes @@ -399,7 +401,7 @@ def add_geometry_to_collection( if key in places.places: raise ValueError(f"geometry '{key}' already in the collection") - if not isinstance(geometry, (PointsTarget, PointPotentialSource)): + if not isinstance(geometry, PointsTarget | PointPotentialSource): raise TypeError( f"Cannot add a geometry of type '{type(geometry).__name__}' " "to the existing collection. Construct a new collection " diff --git a/pytential/linalg/gmres.py b/pytential/linalg/gmres.py index 66dd6e58c..fa367fb03 100644 --- a/pytential/linalg/gmres.py +++ b/pytential/linalg/gmres.py @@ -31,9 +31,9 @@ .. autoclass:: ResidualPrinter """ +from collections.abc import Callable, Sequence from dataclasses import dataclass from functools import partial -from typing import Callable, Optional, Sequence import numpy as np @@ -278,16 +278,16 @@ def __call__(self, resid): def gmres( op: Callable[[ArrayOrContainerT], ArrayOrContainerT], rhs: ArrayOrContainerT, - restart: Optional[int] = None, - tol: Optional[float] = None, - x0: Optional[ArrayOrContainerT] = None, - inner_product: Optional[ - Callable[[ArrayOrContainerT, ArrayOrContainerT], float]] = None, - maxiter: Optional[int] = None, - hard_failure: Optional[bool] = None, - no_progress_factor: Optional[float] = None, - stall_iterations: Optional[int] = None, - callback: Optional[Callable[[ArrayOrContainerT], None]] = None, + restart: int | None = None, + tol: float | None = None, + x0: ArrayOrContainerT | None = None, + inner_product: ( + Callable[[ArrayOrContainerT, ArrayOrContainerT], float] | None) = None, + maxiter: int | None = None, + hard_failure: bool | None = None, + no_progress_factor: float | None = None, + stall_iterations: int | None = None, + callback: Callable[[ArrayOrContainerT], None] | None = None, progress: bool = False, require_monotonicity: bool = True) -> GMRESResult: """Solve a linear system :math:`Ax = b` using GMRES with restarts. diff --git a/pytential/linalg/proxy.py b/pytential/linalg/proxy.py index 7714333b5..59822b76d 100644 --- a/pytential/linalg/proxy.py +++ b/pytential/linalg/proxy.py @@ -20,8 +20,9 @@ THE SOFTWARE. 
""" +from collections.abc import Callable from dataclasses import dataclass -from typing import Any, Callable, Optional +from typing import Any import numpy as np import numpy.linalg as la @@ -68,9 +69,9 @@ def partition_by_nodes( actx: PyOpenCLArrayContext, places: GeometryCollection, *, - dofdesc: Optional["DOFDescriptorLike"] = None, - tree_kind: Optional[str] = "adaptive-level-restricted", - max_particles_in_box: Optional[int] = None) -> IndexList: + dofdesc: DOFDescriptorLike | None = None, + tree_kind: str | None = "adaptive-level-restricted", + max_particles_in_box: int | None = None) -> IndexList: """Generate equally sized ranges of nodes. The partition is created at the lowest level of granularity, i.e. nodes. This results in balanced ranges of points, but will split elements across different ranges. @@ -116,7 +117,7 @@ def partition_by_nodes( ).nonzero() indices: np.ndarray = np.empty(len(leaf_boxes), dtype=object) - starts: Optional[np.ndarray] = None + starts: np.ndarray | None = None for i, ibox in enumerate(leaf_boxes): box_start = tree.box_source_starts[ibox] @@ -230,7 +231,7 @@ class ProxyClusterGeometryData: centers: np.ndarray radii: np.ndarray - _cluster_radii: Optional[np.ndarray] = None + _cluster_radii: np.ndarray | None = None @property def nclusters(self) -> int: @@ -361,11 +362,11 @@ class ProxyGeneratorBase: def __init__( self, places: GeometryCollection, - approx_nproxy: Optional[int] = None, - radius_factor: Optional[float] = None, + approx_nproxy: int | None = None, + radius_factor: float | None = None, norm_type: str = "linf", - _generate_ref_proxies: Optional[Callable[[int], np.ndarray]] = None, + _generate_ref_proxies: Callable[[int], np.ndarray] | None = None, ) -> None: """ :param approx_nproxy: desired number of proxy points. In higher @@ -422,7 +423,7 @@ def get_radii_kernel_ex(self, actx: PyOpenCLArrayContext) -> lp.ExecutorBase: def __call__(self, actx: PyOpenCLArrayContext, - source_dd: Optional["DOFDescriptorLike"], + source_dd: DOFDescriptorLike | None, dof_index: IndexList, **kwargs: Any) -> ProxyClusterGeometryData: """Generate proxy points for each cluster in *dof_index_set* with nodes in @@ -625,7 +626,7 @@ def get_radii_kernel_ex(self, actx: PyOpenCLArrayContext) -> lp.ExecutorBase: def __call__(self, actx: PyOpenCLArrayContext, - source_dd: Optional["DOFDescriptorLike"], + source_dd: DOFDescriptorLike | None, dof_index: IndexList, **kwargs) -> ProxyClusterGeometryData: if source_dd is None: source_dd = self.places.auto_source @@ -651,7 +652,7 @@ def __call__(self, def gather_cluster_neighbor_points( actx: PyOpenCLArrayContext, pxy: ProxyClusterGeometryData, *, - max_particles_in_box: Optional[int] = None) -> IndexList: + max_particles_in_box: int | None = None) -> IndexList: """Generate a set of neighboring points for each cluster of points in *pxy*. Neighboring points of a cluster :math:`i` are defined as all the points inside the proxy ball :math:`i` that do not also diff --git a/pytential/linalg/skeletonization.py b/pytential/linalg/skeletonization.py index a5df89ee5..bfe252dab 100644 --- a/pytential/linalg/skeletonization.py +++ b/pytential/linalg/skeletonization.py @@ -20,9 +20,9 @@ THE SOFTWARE. """ +from collections.abc import Callable, Hashable, Sequence from dataclasses import dataclass -from typing import ( - Any, Callable, Dict, Hashable, Optional, Sequence, Tuple, Union) +from typing import Any import numpy as np @@ -249,9 +249,9 @@ class SkeletonizationWrangler: # operator exprs: np.ndarray - input_exprs: Tuple[sym.var, ...] 
- domains: Tuple[sym.DOFDescriptor, ...] - context: Dict[str, Any] + input_exprs: tuple[sym.var, ...] + domains: tuple[sym.DOFDescriptor, ...] + context: dict[str, Any] neighbor_cluster_builder: Callable[..., np.ndarray] @@ -296,7 +296,7 @@ def evaluate_source_neighbor_interaction(self, actx: PyOpenCLArrayContext, places: GeometryCollection, pxy: ProxyClusterGeometryData, nbrindex: IndexList, *, - ibrow: int, ibcol: int) -> Tuple[np.ndarray, TargetAndSourceClusterList]: + ibrow: int, ibcol: int) -> tuple[np.ndarray, TargetAndSourceClusterList]: nbr_src_index = TargetAndSourceClusterList(nbrindex, pxy.srcindex) eval_mapper_cls = self.neighbor_cluster_builder @@ -311,7 +311,7 @@ def evaluate_target_neighbor_interaction(self, actx: PyOpenCLArrayContext, places: GeometryCollection, pxy: ProxyClusterGeometryData, nbrindex: IndexList, *, - ibrow: int, ibcol: int) -> Tuple[np.ndarray, TargetAndSourceClusterList]: + ibrow: int, ibcol: int) -> tuple[np.ndarray, TargetAndSourceClusterList]: tgt_nbr_index = TargetAndSourceClusterList(pxy.srcindex, nbrindex) eval_mapper_cls = self.neighbor_cluster_builder @@ -330,7 +330,7 @@ def evaluate_source_proxy_interaction(self, actx: PyOpenCLArrayContext, places: GeometryCollection, pxy: ProxyClusterGeometryData, nbrindex: IndexList, *, - ibrow: int, ibcol: int) -> Tuple[np.ndarray, TargetAndSourceClusterList]: + ibrow: int, ibcol: int) -> tuple[np.ndarray, TargetAndSourceClusterList]: from pytential.collection import add_geometry_to_collection pxy_src_index = TargetAndSourceClusterList(pxy.pxyindex, pxy.srcindex) places = add_geometry_to_collection( @@ -351,7 +351,7 @@ def evaluate_target_proxy_interaction(self, actx: PyOpenCLArrayContext, places: GeometryCollection, pxy: ProxyClusterGeometryData, nbrindex: IndexList, *, - ibrow: int, ibcol: int) -> Tuple[np.ndarray, TargetAndSourceClusterList]: + ibrow: int, ibcol: int) -> tuple[np.ndarray, TargetAndSourceClusterList]: from pytential.collection import add_geometry_to_collection tgt_pxy_index = TargetAndSourceClusterList(pxy.srcindex, pxy.pxyindex) places = add_geometry_to_collection( @@ -378,17 +378,17 @@ def evaluate_target_proxy_interaction(self, def make_skeletonization_wrangler( places: GeometryCollection, - exprs: Union[sym.var, Sequence[sym.var]], - input_exprs: Union[sym.var, Sequence[sym.var]], *, - domains: Optional[Sequence[Hashable]] = None, - context: Optional[Dict[str, Any]] = None, - auto_where: Optional[Union[Hashable, Tuple[Hashable, Hashable]]] = None, + exprs: sym.var | Sequence[sym.var], + input_exprs: sym.var | Sequence[sym.var], *, + domains: Sequence[Hashable] | None = None, + context: dict[str, Any] | None = None, + auto_where: Hashable | tuple[Hashable, Hashable] | None = None, # internal - _weighted_proxy: Optional[Union[bool, Tuple[bool, bool]]] = None, - _proxy_source_cluster_builder: Optional[Callable[..., np.ndarray]] = None, - _proxy_target_cluster_builder: Optional[Callable[..., np.ndarray]] = None, - _neighbor_cluster_builder: Optional[Callable[..., np.ndarray]] = None, + _weighted_proxy: bool | tuple[bool, bool] | None = None, + _proxy_source_cluster_builder: Callable[..., np.ndarray] | None = None, + _proxy_target_cluster_builder: Callable[..., np.ndarray] | None = None, + _neighbor_cluster_builder: Callable[..., np.ndarray] | None = None, ) -> SkeletonizationWrangler: if context is None: context = {} @@ -515,7 +515,7 @@ class _ProxyNeighborEvaluationResult: nbrmat: np.ndarray nbrindex: TargetAndSourceClusterList - def __getitem__(self, i: int) -> Tuple[np.ndarray, 
np.ndarray]: + def __getitem__(self, i: int) -> tuple[np.ndarray, np.ndarray]: """ :returns: a :class:`tuple` of ``(pxymat, nbrmat)`` containing the :math:`i`-th cluster interactions. The matrices are reshaped into @@ -538,11 +538,11 @@ def _evaluate_proxy_skeletonization_interaction( wrangler: SkeletonizationWrangler, cluster_index: IndexList, *, evaluate_proxy: Callable[..., - Tuple[np.ndarray, TargetAndSourceClusterList]], + tuple[np.ndarray, TargetAndSourceClusterList]], evaluate_neighbor: Callable[..., - Tuple[np.ndarray, TargetAndSourceClusterList]], - dofdesc: Optional[sym.DOFDescriptor] = None, - max_particles_in_box: Optional[int] = None, + tuple[np.ndarray, TargetAndSourceClusterList]], + dofdesc: sym.DOFDescriptor | None = None, + max_particles_in_box: int | None = None, ) -> _ProxyNeighborEvaluationResult: """Evaluate the proxy to cluster and neighbor to cluster interactions for each cluster in *cluster_index*. @@ -572,8 +572,8 @@ def _skeletonize_block_by_proxy_with_mats( proxy_generator: ProxyGeneratorBase, wrangler: SkeletonizationWrangler, tgt_src_index: TargetAndSourceClusterList, *, - id_eps: Optional[float] = None, id_rank: Optional[int] = None, - max_particles_in_box: Optional[int] = None + id_eps: float | None = None, id_rank: int | None = None, + max_particles_in_box: int | None = None ) -> "SkeletonizationResult": nclusters = tgt_src_index.nclusters if nclusters == 1: @@ -718,8 +718,8 @@ class SkeletonizationResult: # NOTE: these are meant only for testing! They contain the interactions # between the source / target points and their proxies / neighbors. - _src_eval_result: Optional[_ProxyNeighborEvaluationResult] = None - _tgt_eval_result: Optional[_ProxyNeighborEvaluationResult] = None + _src_eval_result: _ProxyNeighborEvaluationResult | None = None + _tgt_eval_result: _ProxyNeighborEvaluationResult | None = None def __post_init__(self): if __debug__: @@ -747,17 +747,17 @@ def skeletonize_by_proxy( places: GeometryCollection, tgt_src_index: TargetAndSourceClusterList, - exprs: Union[sym.var, Sequence[sym.var]], - input_exprs: Union[sym.var, Sequence[sym.var]], *, - domains: Optional[Sequence[Hashable]] = None, - context: Optional[Dict[str, Any]] = None, + exprs: sym.var | Sequence[sym.var], + input_exprs: sym.var | Sequence[sym.var], *, + domains: Sequence[Hashable] | None = None, + context: dict[str, Any] | None = None, - approx_nproxy: Optional[int] = None, - proxy_radius_factor: Optional[float] = None, + approx_nproxy: int | None = None, + proxy_radius_factor: float | None = None, - id_eps: Optional[float] = None, - id_rank: Optional[int] = None, - max_particles_in_box: Optional[int] = None) -> np.ndarray: + id_eps: float | None = None, + id_rank: int | None = None, + max_particles_in_box: int | None = None) -> np.ndarray: r"""Evaluate and skeletonize a symbolic expression using proxy-based methods. 
:arg tgt_src_index: a :class:`~pytential.linalg.TargetAndSourceClusterList` diff --git a/pytential/linalg/utils.py b/pytential/linalg/utils.py index 9900eb2f2..17895b125 100644 --- a/pytential/linalg/utils.py +++ b/pytential/linalg/utils.py @@ -21,7 +21,7 @@ """ from dataclasses import dataclass -from typing import Any, Optional, Tuple, TYPE_CHECKING +from typing import Any, TYPE_CHECKING import numpy as np import numpy.linalg as la @@ -153,14 +153,14 @@ def _flat_cluster_starts(self): def _flat_total_size(self): return self._flat_cluster_starts[-1] - def cluster_shape(self, i: int, j: int) -> Tuple[int, int]: + def cluster_shape(self, i: int, j: int) -> tuple[int, int]: r""" :returns: the shape of the cluster ``(i, j)``, where *i* indexes into the :attr:`targets` and *j* into the :attr:`sources`. """ return (self.targets.cluster_size(i), self.sources.cluster_size(j)) - def cluster_indices(self, i: int, j: int) -> Tuple[np.ndarray, np.ndarray]: + def cluster_indices(self, i: int, j: int) -> tuple[np.ndarray, np.ndarray]: """ :returns: a view into the indices that make up the cluster ``(i, j)``. """ @@ -192,7 +192,7 @@ def flat_cluster_take(self, x: np.ndarray, i: int) -> np.ndarray: def make_index_list( indices: np.ndarray, - starts: Optional[np.ndarray] = None) -> IndexList: + starts: np.ndarray | None = None) -> IndexList: """Wrap a ``(indices, starts)`` tuple into an :class:`IndexList`. :param starts: if *None*, then *indices* is expected to be an object @@ -213,7 +213,7 @@ def make_index_list( def make_index_cluster_cartesian_product( actx: PyOpenCLArrayContext, - mindex: TargetAndSourceClusterList) -> Tuple[Array, Array]: + mindex: TargetAndSourceClusterList) -> tuple[Array, Array]: """Constructs a cluster by cluster Cartesian product of all the indices in *mindex*. @@ -326,8 +326,8 @@ def make_flat_cluster_diag( # {{{ interpolative decomposition def interp_decomp( - A: np.ndarray, *, rank: Optional[int], eps: Optional[float], - ) -> Tuple[int, np.ndarray, np.ndarray]: + A: np.ndarray, *, rank: int | None, eps: float | None, + ) -> tuple[int, np.ndarray, np.ndarray]: """Wrapper for :func:`~scipy.linalg.interpolative.interp_decomp` that always has the same output signature. @@ -354,8 +354,8 @@ def interp_decomp( def cluster_skeletonization_error( mat: np.ndarray, skeleton: "SkeletonizationResult", *, - ord: Optional[float] = None, - relative: bool = False) -> Tuple[np.ndarray, np.ndarray]: + ord: float | None = None, + relative: bool = False) -> tuple[np.ndarray, np.ndarray]: r"""Evaluate the cluster-wise skeletonization errors for the given *skeleton*. Errors are computed for all interactions between cluster :math:`i` and @@ -428,7 +428,7 @@ def mnorm(x: np.ndarray, y: np.ndarray) -> "np.floating[Any]": def skeletonization_error( mat: np.ndarray, skeleton: "SkeletonizationResult", *, - ord: Optional[float] = None, + ord: float | None = None, relative: bool = False) -> "np.floating[Any]": r"""Computes the skeletonization error for the entire matrix *mat*. diff --git a/pytential/muller.py b/pytential/muller.py index e662a25fc..5e4068753 100644 --- a/pytential/muller.py +++ b/pytential/muller.py @@ -20,7 +20,8 @@ THE SOFTWARE. 
""" -from typing import Callable, List, Optional, Tuple, TypeVar +from collections.abc import Callable +from typing import TypeVar import numpy as np @@ -31,8 +32,8 @@ def muller_deflate( f: Callable[[T], T], n: int, *, maxiter: int = 100, eps: float = 1.0e-14, - z_start: Optional[np.ndarray] = None, - ) -> Tuple[List[T], List[int]]: + z_start: np.ndarray | None = None, + ) -> tuple[list[T], list[int]]: """ :arg n: number of zeros sought. :returns: a tuple of ``(roots, niter)``, where *roots* is a list of roots @@ -40,8 +41,8 @@ def muller_deflate( find each root. """ # initialize variables - roots: List[T] = [] - niter: List[int] = [] + roots: list[T] = [] + niter: list[int] = [] def f_deflated(z: T) -> T: y = f(z) @@ -72,7 +73,7 @@ def f_deflated(z: T) -> T: def muller(f: Callable[[T], T], *, maxiter: int = 100, tol: float = 1.0e-11, - z_start: Optional[np.ndarray] = None) -> Tuple[T, int]: + z_start: np.ndarray | None = None) -> tuple[T, int]: """Find a root of the complex-valued function *f* defined in the complex plane using Muller's method. diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 03ee21f73..092b785bd 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -20,8 +20,8 @@ THE SOFTWARE. """ +from collections.abc import Callable from functools import partial -from typing import Callable, Optional, Union import numpy as np @@ -93,15 +93,12 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): def __init__( self, density_discr: Discretization, - fine_order: Optional[int], - qbx_order: Optional[int] = None, - fmm_order: Optional[Union[bool, int]] = None, - fmm_level_to_order: Optional[ - Union[bool, Callable[..., int]] - ] = None, - expansion_factory: Optional[DefaultExpansionFactoryBase] = None, - target_association_tolerance: Optional[ - float] = _not_provided, # type: ignore[assignment] + fine_order: int | None, + qbx_order: int | None = None, + fmm_order: bool | int | None = None, + fmm_level_to_order: bool | Callable[..., int] | None = None, + expansion_factory: DefaultExpansionFactoryBase | None = None, + target_association_tolerance: float | None = _not_provided, # type: ignore[assignment] # begin experimental arguments # FIXME default debug=False once everything has matured @@ -109,15 +106,15 @@ def __init__( _disable_refinement: bool = False, _expansions_in_tree_have_extent: bool = True, _expansion_stick_out_factor: float = 0.5, - _max_leaf_refine_weight: Optional[int] = None, - _box_extent_norm: Optional[str] = None, + _max_leaf_refine_weight: int | None = None, + _box_extent_norm: str | None = None, _tree_kind: str = "adaptive", _well_sep_is_n_away: int = 2, - _from_sep_smaller_crit: Optional[str] = None, - _from_sep_smaller_min_nsources_cumul: Optional[int] = None, - _use_target_specific_qbx: Optional[bool] = None, - geometry_data_inspector: Optional[Callable[..., bool]] = None, - cost_model: Optional[AbstractQBXCostModel] = None, + _from_sep_smaller_crit: str | None = None, + _from_sep_smaller_min_nsources_cumul: int | None = None, + _use_target_specific_qbx: bool | None = None, + geometry_data_inspector: Callable[..., bool] | None = None, + cost_model: AbstractQBXCostModel | None = None, fmm_backend: str = "sumpy", ) -> None: """ diff --git a/pytential/qbx/fmmlib.py b/pytential/qbx/fmmlib.py index adb1b5298..89b09cca5 100644 --- a/pytential/qbx/fmmlib.py +++ b/pytential/qbx/fmmlib.py @@ -128,16 +128,16 @@ def is_supported_helmknl(knl): if isinstance(knl, DirectionalSourceDerivative): knl = knl.inner_kernel - return 
(isinstance(knl, (LaplaceKernel, HelmholtzKernel))
+        return (isinstance(knl, LaplaceKernel | HelmholtzKernel)
                 and knl.dim in (2, 3))
 
     @staticmethod
     def is_supported_helmknl_for_tsqbx(knl):
         # Supports at most one derivative.
-        if isinstance(knl, (DirectionalSourceDerivative, AxisTargetDerivative)):
+        if isinstance(knl, DirectionalSourceDerivative | AxisTargetDerivative):
             knl = knl.inner_kernel
 
-        return (isinstance(knl, (LaplaceKernel, HelmholtzKernel))
+        return (isinstance(knl, LaplaceKernel | HelmholtzKernel)
                 and knl.dim == 3)
 
     @property
diff --git a/pytential/source.py b/pytential/source.py
index 48cc46e6f..8752ba05d 100644
--- a/pytential/source.py
+++ b/pytential/source.py
@@ -20,8 +20,9 @@
 THE SOFTWARE.
 """
 
+from collections.abc import Hashable
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Hashable, Optional, Tuple
+from typing import TYPE_CHECKING
 
 import numpy as np
 from arraycontext import PyOpenCLArrayContext, flatten, unflatten
@@ -77,7 +78,7 @@ def complex_dtype(self):
         """:class:`~numpy.dtype` of complex data living on the source geometry."""
 
     @abstractmethod
-    def op_group_features(self, expr: sym.IntG) -> Tuple[Hashable, ...]:
+    def op_group_features(self, expr: sym.IntG) -> tuple[Hashable, ...]:
         """
         :arg expr: a subclass of :class:`~pytential.symbolic.primitives.IntG`.
         :returns: a characteristic tuple by which operators that can be
@@ -87,8 +88,8 @@ def op_group_features(self, expr: sym.IntG) -> Tuple[Hashable, ...]:
 
     @abstractmethod
     def get_p2p(self, actx: PyOpenCLArrayContext,
-            target_kernels: Tuple[Kernel, ...],
-            source_kernels: Optional[Tuple[Kernel, ...]] = None) -> P2PBase:
+            target_kernels: tuple[Kernel, ...],
+            source_kernels: tuple[Kernel, ...] | None = None) -> P2PBase:
         """
         :returns: a subclass of :class:`~sumpy.p2p.P2PBase` for evaluating
             the *target_kernels* and the *source_kernels* on the source geometry.
@@ -110,11 +111,11 @@ class _SumpyP2PMixin:
 
     def get_p2p(self, actx: PyOpenCLArrayContext,
-            target_kernels: Tuple[Kernel, ...],
-            source_kernels: Optional[Tuple[Kernel, ...]] = None) -> P2PBase:
+            target_kernels: tuple[Kernel, ...],
+            source_kernels: tuple[Kernel, ...] | None = None) -> P2PBase:
 
         @memoize_in(actx, (_SumpyP2PMixin, "p2p"))
-        def p2p(target_kernels: Tuple[Kernel, ...],
-                source_kernels: Optional[Tuple[Kernel, ...]]) -> P2PBase:
+        def p2p(target_kernels: tuple[Kernel, ...],
+                source_kernels: tuple[Kernel, ...] | None) -> P2PBase:
             if any(knl.is_complex_valued for knl in target_kernels):
                 value_dtype = self.complex_dtype  # type: ignore[attr-defined]
             else:
diff --git a/pytential/symbolic/compiler.py b/pytential/symbolic/compiler.py
index 39827d808..f5c93db99 100644
--- a/pytential/symbolic/compiler.py
+++ b/pytential/symbolic/compiler.py
@@ -20,11 +20,11 @@
 THE SOFTWARE.
 """
 
+from collections.abc import (
+    Collection, Hashable, Iterator, Sequence, Set as AbstractSet)
 from dataclasses import dataclass
 from functools import reduce
-from typing import (
-    AbstractSet, Any, Collection, Tuple, Dict, Hashable, List,
-    Optional, Sequence, Set, Iterator)
+from typing import Any
 
 import numpy as np
 
@@ -45,15 +44,15 @@ class Statement:
     .. attribute:: exprs
     .. 
attribute:: priority """ - names: List[str] - exprs: List[Expression] + names: list[str] + exprs: list[Expression] priority: int - def get_assignees(self) -> Set[str]: + def get_assignees(self) -> set[str]: raise NotImplementedError( f"get_assignees for '{self.__class__.__name__}'") - def get_dependencies(self, dep_mapper: DependencyMapper) -> Set[Expression]: + def get_dependencies(self, dep_mapper: DependencyMapper) -> set[Expression]: raise NotImplementedError( f"get_dependencies for '{self.__class__.__name__}'") @@ -71,7 +70,7 @@ class Assign(Statement): expression that is not needed beyond this assignment. """ - do_not_return: Optional[List[bool]] = None + do_not_return: list[bool] | None = None comment: str = "" def __post_init__(self): @@ -81,7 +80,7 @@ def __post_init__(self): def get_assignees(self): return set(self.names) - def get_dependencies(self, dep_mapper: DependencyMapper) -> Set[Expression]: + def get_dependencies(self, dep_mapper: DependencyMapper) -> set[Expression]: from operator import or_ deps = reduce(or_, (dep_mapper(expr) for expr in self.exprs)) @@ -179,17 +178,17 @@ class ComputePotential(Statement): .. attribute:: source """ - outputs: List[PotentialOutput] - target_kernels: List[Kernel] - kernel_arguments: Dict[str, Any] - source_kernels: List[Kernel] - densities: List[Expression] + outputs: list[PotentialOutput] + target_kernels: list[Kernel] + kernel_arguments: dict[str, Any] + source_kernels: list[Kernel] + densities: list[Expression] source: DOFDescriptor def get_assignees(self): return {o.name for o in self.outputs} - def get_dependencies(self, dep_mapper: DependencyMapper) -> Set[Expression]: + def get_dependencies(self, dep_mapper: DependencyMapper) -> set[Expression]: result = dep_mapper(self.densities[0]) for density in self.densities[1:]: result.update(dep_mapper(density)) @@ -327,7 +326,7 @@ class Code: def __init__( self, inputs: AbstractSet[str], - schedule: Sequence[Tuple[Statement, Collection[str]]], + schedule: Sequence[tuple[Statement, Collection[str]]], result: np.ndarray, ) -> None: self.inputs = inputs @@ -335,7 +334,7 @@ def __init__( self.result = result @property - def statements(self) -> List[Statement]: + def statements(self) -> list[Statement]: return [stmt for stmt, _discardable_vars in self._schedule] def __str__(self) -> str: @@ -361,7 +360,7 @@ def _get_next_step( result: np.ndarray, available_names: AbstractSet[str], done_stmts: AbstractSet[Statement] - ) -> Tuple[Statement, Set[str]]: + ) -> tuple[Statement, set[str]]: from pytools import argmax2 available_stmts = [ @@ -410,14 +409,14 @@ def _compute_schedule( dep_mapper: DependencyMapper, statements: Sequence[Statement], result: np.ndarray, - ) -> Tuple[Set[str], List[Tuple[Statement, Set[str]]]]: + ) -> tuple[set[str], list[tuple[Statement, set[str]]]]: # FIXME: I'm O(n**2). I want to be replaced with a normal topological sort. 
schedule = []
 
-        done_stmts: Set[Statement] = set()
+        done_stmts: set[Statement] = set()
 
-        inputs: Set[str] = {
+        inputs: set[str] = {
             dep.name
             for stmt in set(statements)
             for dep in stmt.get_dependencies(dep_mapper)
@@ -464,10 +463,10 @@ def __init__(
         self.places = places
         self.prefix = prefix
 
-        self.code: List[Statement] = []
-        self.expr_to_var: Dict[Expression, Variable] = {}
-        self.assigned_names: Set[str] = set()
-        self.group_to_operators: Dict[Hashable, Set[IntG]] = {}
+        self.code: list[Statement] = []
+        self.expr_to_var: dict[Expression, Variable] = {}
+        self.assigned_names: set[str] = set()
+        self.group_to_operators: dict[Hashable, set[IntG]] = {}
 
         self.dep_mapper = DependencyMapper(
                 # include_operator_bindings=False,
                 include_lookups=False,
@@ -510,7 +509,7 @@ def __call__(self, expr):
 
     # {{{ variables and names
 
-    def get_var_name(self, prefix: Optional[str] = None) -> str:
+    def get_var_name(self, prefix: str | None = None) -> str:
         def generate_suffixes() -> Iterator[str]:
             yield ""
             i = 2
@@ -545,14 +544,14 @@ def make_assign(
                 priority=priority)
 
     def assign_to_new_var(
-            self, expr: Expression, priority: int = 0, prefix: Optional[str] = None,
+            self, expr: Expression, priority: int = 0, prefix: str | None = None,
             ) -> Variable:
         from pymbolic.primitives import Subscript
 
         # Observe that the only things that can be legally subscripted
         # are variables. All other expressions are broken down into
         # their scalar components.
-        if isinstance(expr, (Variable, Subscript)):
+        if isinstance(expr, Variable | Subscript):
             return expr
 
         new_name = self.get_var_name(prefix)
diff --git a/pytential/symbolic/dof_connection.py b/pytential/symbolic/dof_connection.py
index 5f52aff62..65c5e5c68 100644
--- a/pytential/symbolic/dof_connection.py
+++ b/pytential/symbolic/dof_connection.py
@@ -141,7 +141,7 @@ def __call__(self, arys):
         If *arys* was a pair of arrays :math:`(x, y)`, they are interleaved
         as :math:`[x_1, y_1, x_2, y_2, \ddots, x_n, y_n]`.
         """
-        if isinstance(arys, (list, tuple)):
+        if isinstance(arys, list | tuple):
             ary1, ary2 = arys
         else:
             ary1, ary2 = arys, arys
@@ -196,7 +196,7 @@ def __init__(self, connections, from_dd=None, to_dd=None):
         from meshmode.discretization.connection import DiscretizationConnection
         for conn in self.connections:
             if not isinstance(conn,
-                    (DiscretizationConnection, GranularityConnection)):
-                raise ValueError("unsupported connection type: {type(conn)}")
+                    DiscretizationConnection | GranularityConnection):
+                raise ValueError(f"unsupported connection type: {type(conn)}")
 
         if self.connections:
diff --git a/pytential/symbolic/dof_desc.py b/pytential/symbolic/dof_desc.py
index b8fb28b1d..b7190576f 100644
--- a/pytential/symbolic/dof_desc.py
+++ b/pytential/symbolic/dof_desc.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 __copyright__ = "Copyright (C) 2010-2013 Andreas Kloeckner"
 
 __license__ = """
@@ -20,7 +22,8 @@
 THE SOFTWARE.
 """
 
-from typing import Any, Hashable, Optional, Type, Union
+from collections.abc import Hashable
+from typing import Any
 
 __doc__ = """
 .. 
autoclass:: DEFAULT_SOURCE @@ -159,9 +162,9 @@ class DOFDescriptor: """ def __init__(self, - geometry: Optional[Hashable] = None, - discr_stage: Optional["DiscretizationStages"] = None, - granularity: Optional["DOFGranularities"] = None): + geometry: Hashable | None = None, + discr_stage: DiscretizationStages | None = None, + granularity: DOFGranularities | None = None): if granularity is None: granularity = GRANULARITY_NODE @@ -181,10 +184,9 @@ def __init__(self, self.granularity = granularity def copy(self, - geometry: Optional[Hashable] = None, - discr_stage: Optional[ - "DiscretizationStages"] = _NoArgSentinel, # type: ignore[assignment] - granularity: Optional["DOFGranularities"] = None) -> "DOFDescriptor": + geometry: Hashable | None = None, + discr_stage: DiscretizationStages | None = _NoArgSentinel, # type: ignore[assignment] + granularity: DOFGranularities | None = None) -> DOFDescriptor: if isinstance(geometry, DOFDescriptor): discr_stage = geometry.discr_stage \ if discr_stage is _NoArgSentinel else discr_stage @@ -199,13 +201,13 @@ def copy(self, if discr_stage is _NoArgSentinel else discr_stage), ) - def to_stage1(self) -> "DOFDescriptor": + def to_stage1(self) -> DOFDescriptor: return self.copy(discr_stage=QBX_SOURCE_STAGE1) - def to_stage2(self) -> "DOFDescriptor": + def to_stage2(self) -> DOFDescriptor: return self.copy(discr_stage=QBX_SOURCE_STAGE2) - def to_quad_stage2(self) -> "DOFDescriptor": + def to_quad_stage2(self) -> DOFDescriptor: return self.copy(discr_stage=QBX_SOURCE_QUAD_STAGE2) def __hash__(self) -> int: @@ -255,7 +257,7 @@ def __str__(self) -> str: return "/".join(name) -def as_dofdesc(desc: "DOFDescriptorLike") -> "DOFDescriptor": +def as_dofdesc(desc: DOFDescriptorLike) -> DOFDescriptor: if isinstance(desc, DOFDescriptor): return desc @@ -273,21 +275,18 @@ def as_dofdesc(desc: "DOFDescriptorLike") -> "DOFDescriptor": # {{{ type annotations -DiscretizationStages = Union[ - Type[QBX_SOURCE_STAGE1], - Type[QBX_SOURCE_STAGE2], - Type[QBX_SOURCE_QUAD_STAGE2], - ] - -DOFGranularities = Union[ - Type[GRANULARITY_NODE], - Type[GRANULARITY_CENTER], - Type[GRANULARITY_ELEMENT], - ] - -DOFDescriptorLike = Union[ - DOFDescriptor, - Hashable - ] +DiscretizationStages = ( + type[QBX_SOURCE_STAGE1] + | type[QBX_SOURCE_STAGE2] + | type[QBX_SOURCE_QUAD_STAGE2] + ) + +DOFGranularities = ( + type[GRANULARITY_NODE] + | type[GRANULARITY_CENTER] + | type[GRANULARITY_ELEMENT] + ) + +DOFDescriptorLike = DOFDescriptor | Hashable # }}} diff --git a/pytential/symbolic/execution.py b/pytential/symbolic/execution.py index f497ed4a9..b39c276d0 100644 --- a/pytential/symbolic/execution.py +++ b/pytential/symbolic/execution.py @@ -23,7 +23,8 @@ THE SOFTWARE. 
""" -from typing import Any, Dict, Hashable, List, Optional, Sequence, Tuple, Union +from collections.abc import Hashable, Sequence +from typing import Any from pymbolic.mapper.evaluator import ( EvaluationMapper as PymbolicEvaluationMapper) @@ -251,7 +252,7 @@ def map_interpolation(self, expr): list, np.ndarray, DOFArray)): conn = self.places.get_connection(expr.from_dd, expr.to_dd) return conn(operand) - elif isinstance(operand, (int, float, complex, np.number)): + elif isinstance(operand, int | float | complex | np.number): return operand else: raise TypeError(f"cannot interpolate '{type(operand).__name__}'") @@ -537,8 +538,8 @@ def matvec(self, x): def _prepare_domains( nresults: int, places: GeometryCollection, - domains: Optional[Union[DOFDescriptorLike, Sequence[DOFDescriptorLike]]], - default_domain: Optional[DOFDescriptorLike]) -> List[DOFDescriptor]: + domains: DOFDescriptorLike | Sequence[DOFDescriptorLike] | None, + default_domain: DOFDescriptorLike | None) -> list[DOFDescriptor]: """ :arg nresults: number of results. :arg places: a :class:`~pytential.collection.GeometryCollection`. @@ -552,7 +553,7 @@ def _prepare_domains( """ if domains is None: return nresults * [sym.as_dofdesc(default_domain)] - elif not isinstance(domains, (list, tuple)): + elif not isinstance(domains, list | tuple): return nresults * [sym.as_dofdesc(domains)] else: assert len(domains) == nresults @@ -561,8 +562,8 @@ def _prepare_domains( def _prepare_auto_where( auto_where: AutoWhereLike, - places: Optional[GeometryCollection] = None, - ) -> Tuple[DOFDescriptor, DOFDescriptor]: + places: GeometryCollection | None = None, + ) -> tuple[DOFDescriptor, DOFDescriptor]: """ :arg places: a :class:`pytential.collection.GeometryCollection`, whose :attr:`pytential.collection.GeometryCollection.auto_where` is @@ -578,7 +579,7 @@ def _prepare_auto_where( auto_target: Hashable = _UNNAMED_TARGET else: auto_source, auto_target = places.auto_where - elif isinstance(auto_where, (list, tuple)): + elif isinstance(auto_where, list | tuple): auto_source, auto_target = auto_where else: auto_source = auto_where @@ -659,8 +660,8 @@ def execute(code: Code, exec_mapper, pre_assign_check=None) -> np.ndarray: # {{{ bound expression def _find_array_context_from_args_in_context( - context: Dict[str, Any], - supplied_array_context: Optional[PyOpenCLArrayContext] = None, + context: dict[str, Any], + supplied_array_context: PyOpenCLArrayContext | None = None, ) -> PyOpenCLArrayContext: from arraycontext import PyOpenCLArrayContext array_contexts = [] @@ -813,7 +814,7 @@ def scipy_op( arg_name, dtype, total_dofs, discrs, starts_and_ends, extra_args) def eval(self, context=None, timing_data=None, - array_context: Optional[PyOpenCLArrayContext] = None): + array_context: PyOpenCLArrayContext | None = None): """Evaluate the expression in *self*, using the input variables given in the dictionary *context*. 
diff --git a/pytential/symbolic/matrix.py b/pytential/symbolic/matrix.py index 78a7675c4..60aed6768 100644 --- a/pytential/symbolic/matrix.py +++ b/pytential/symbolic/matrix.py @@ -37,7 +37,7 @@ # {{{ helpers def is_zero(x): - return isinstance(x, (int, float, complex, np.number)) and x == 0 + return isinstance(x, int | float | complex | np.number) and x == 0 def _get_layer_potential_args(actx, places, expr, context=None, include_args=None): @@ -166,7 +166,7 @@ def map_product(self, expr): if is_zero(rec_child): return 0 - if isinstance(rec_child, (np.number, int, float, complex)): + if isinstance(rec_child, np.number | int | float | complex): vecs_and_scalars = vecs_and_scalars * rec_child elif isinstance(rec_child, np.ndarray): if self.is_kind_matrix(rec_child): @@ -326,7 +326,7 @@ def map_interpolation(self, expr): operand = self.rec(expr.operand) actx = self.array_context - if isinstance(operand, (int, float, complex, np.number)): + if isinstance(operand, int | float | complex | np.number): return operand elif isinstance(operand, np.ndarray) and operand.ndim == 1: conn = self.places.get_connection(expr.from_dd, expr.to_dd) diff --git a/pytential/symbolic/pde/beltrami.py b/pytential/symbolic/pde/beltrami.py index 3d18c14be..0749ecf72 100644 --- a/pytential/symbolic/pde/beltrami.py +++ b/pytential/symbolic/pde/beltrami.py @@ -28,7 +28,7 @@ """ from functools import partial -from typing import Any, Dict, Optional +from typing import Any import numpy as np @@ -80,9 +80,9 @@ class BeltramiOperator: """ def __init__(self, kernel: Kernel, *, - dim: Optional[int] = None, + dim: int | None = None, precond: str = "left", - kernel_arguments: Optional[Dict[str, Any]] = None) -> None: + kernel_arguments: dict[str, Any] | None = None) -> None: if dim is None: dim = kernel.dim - 1 @@ -144,7 +144,7 @@ def prepare_rhs(self, b: sym.var) -> sym.var: def operator(self, sigma: sym.var, - mean_curvature: Optional[sym.var] = None, + mean_curvature: sym.var | None = None, **kwargs) -> sym.var: """ :arg mean_curvature: an expression for the mean curvature that can be @@ -223,7 +223,7 @@ class LaplaceBeltramiOperator(BeltramiOperator): """ def __init__(self, ambient_dim, *, - dim: Optional[int] = None, + dim: int | None = None, precond: str = "left") -> None: from sumpy.kernel import LaplaceKernel super().__init__( @@ -233,7 +233,7 @@ def __init__(self, ambient_dim, *, def operator(self, sigma: sym.var, - mean_curvature: Optional[sym.var] = None, + mean_curvature: sym.var | None = None, **kwargs) -> sym.var: """ :arg mean_curvature: an expression for the mean curvature that can be @@ -305,7 +305,7 @@ class YukawaBeltramiOperator(BeltramiOperator): """ def __init__(self, ambient_dim: int, *, - dim: Optional[int] = None, + dim: int | None = None, precond: str = "left", yukawa_k_name: str = "k") -> None: from sumpy.kernel import YukawaKernel @@ -334,7 +334,7 @@ class HelmholtzBeltramiOperator(BeltramiOperator): """ def __init__(self, ambient_dim: int, *, - dim: Optional[int] = None, + dim: int | None = None, precond: str = "left", helmholtz_k_name: str = "k") -> None: from sumpy.kernel import HelmholtzKernel diff --git a/pytential/symbolic/pde/scalar.py b/pytential/symbolic/pde/scalar.py index 6aed1fcf7..fd7bb4cda 100644 --- a/pytential/symbolic/pde/scalar.py +++ b/pytential/symbolic/pde/scalar.py @@ -27,7 +27,7 @@ .. 
autoclass:: BiharmonicClampedPlateOperator """ -from typing import Any, Dict, Optional, Union +from typing import Any import numpy as np @@ -155,9 +155,9 @@ class DirichletOperator(L2WeightedPDEOperator): """ def __init__(self, kernel: Kernel, loc_sign: int, *, - alpha: Optional[Union[int, float, complex]] = None, + alpha: int | float | complex | None = None, use_l2_weighting: bool = False, - kernel_arguments: Optional[Dict[str, Any]] = None): + kernel_arguments: dict[str, Any] | None = None): """ :param loc_sign: :math:`+1` for exterior or :math:`-1` for interior problems. @@ -294,10 +294,10 @@ class NeumannOperator(L2WeightedPDEOperator): """ def __init__(self, kernel: Kernel, loc_sign: int, *, - alpha: Optional[Union[int, float, complex]] = None, + alpha: int | float | complex | None = None, use_improved_operator: bool = True, use_l2_weighting: bool = False, - kernel_arguments: Optional[Dict[str, Any]] = None): + kernel_arguments: dict[str, Any] | None = None): """ :param loc_sign: :math:`+1` for exterior or :math:`-1` for interior problems. diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 0b30380b7..6531e207c 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -23,7 +23,7 @@ from sys import intern from warnings import warn from functools import partial -from typing import ClassVar, Tuple +from typing import ClassVar import numpy as np @@ -397,7 +397,7 @@ class DiscretizationProperty(Expression): .. attribute:: dofdesc """ - init_arg_names: ClassVar[Tuple[str, ...]] = ("dofdesc",) + init_arg_names: ClassVar[tuple[str, ...]] = ("dofdesc",) def __init__(self, dofdesc=None): """ @@ -1065,7 +1065,7 @@ def __new__(cls, operand=None): # object array of the operator applied to each of the # coefficients in the multivector. - if isinstance(operand, (np.ndarray, MultiVector)): + if isinstance(operand, np.ndarray | MultiVector): def make_op(operand_i): return cls(operand_i) @@ -1116,7 +1116,7 @@ def __new__(cls, operand=None, dofdesc=None): # object array of the operator applied to each of the # coefficients in the multivector. - if isinstance(operand, (np.ndarray, MultiVector)): + if isinstance(operand, np.ndarray | MultiVector): def make_op(operand_i): return cls(operand_i, dofdesc) @@ -1595,7 +1595,7 @@ def make_op(operand_i): qbx_forced_limit=qbx_forced_limit, source=source, target=target, kernel_arguments=kernel_arguments, **kwargs) - if isinstance(density, (np.ndarray, MultiVector)): + if isinstance(density, np.ndarray | MultiVector): return componentwise(make_op, density) else: return make_op(density) diff --git a/pytential/target.py b/pytential/target.py index 2b87bbde8..c1b6727c1 100644 --- a/pytential/target.py +++ b/pytential/target.py @@ -37,7 +37,7 @@ """ from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from arraycontext.context import Array from pytools import T @@ -87,7 +87,7 @@ class PointsTarget(TargetBase): .. automethod:: preprocess_optemplate """ - def __init__(self, nodes: Array, normals: Optional[Array] = None) -> None: + def __init__(self, nodes: Array, normals: Array | None = None) -> None: self._nodes = nodes self.normals = normals diff --git a/pytential/unregularized.py b/pytential/unregularized.py index 412efe90b..06b3b0a2d 100644 --- a/pytential/unregularized.py +++ b/pytential/unregularized.py @@ -22,7 +22,7 @@ THE SOFTWARE. 
""" -from typing import Any, Dict, Tuple +from typing import Any import numpy as np @@ -180,7 +180,7 @@ def exec_compute_potential_insn_direct(self, actx: PyOpenCLArrayContext, results.append((o.name, result)) - timing_data: Dict[str, Any] = {} + timing_data: dict[str, Any] = {} return results, timing_data # {{{ fmm-based execution @@ -223,7 +223,7 @@ def exec_compute_potential_insn_fmm(self, actx: PyOpenCLArrayContext, from pytential.collection import GeometryLike target_name_to_index = {} - targets: Tuple[GeometryLike, ...] = () + targets: tuple[GeometryLike, ...] = () for o in insn.outputs: assert o.qbx_forced_limit not in (-1, 1) @@ -297,7 +297,7 @@ def exec_compute_potential_insn_fmm(self, actx: PyOpenCLArrayContext, # }}} - timing_data: Dict[str, Any] = {} + timing_data: dict[str, Any] = {} return results, timing_data # }}} From 9baf751533aef9a9b31cfe0a2e82bbd5da5071c1 Mon Sep 17 00:00:00 2001 From: Alexandru Fikl Date: Sat, 26 Oct 2024 15:25:53 +0300 Subject: [PATCH 3/5] ruff: fix zip strict argument --- pytential/linalg/proxy.py | 3 +- pytential/qbx/fmm.py | 5 +- pytential/qbx/geometry.py | 14 +++-- pytential/qbx/utils.py | 2 +- pytential/symbolic/compiler.py | 7 ++- pytential/symbolic/dof_connection.py | 2 +- pytential/symbolic/execution.py | 6 +- pytential/symbolic/mappers.py | 10 +-- pytential/symbolic/matrix.py | 8 +-- pytential/symbolic/pde/maxwell/__init__.py | 2 +- pytential/unregularized.py | 3 +- pytential/utils.py | 2 +- test/extra_curve_data.py | 7 ++- test/extra_int_eq_data.py | 71 +++++++++++----------- test/extra_matrix_data.py | 17 +++--- test/test_beltrami.py | 2 +- test/test_global_qbx.py | 5 +- test/test_linalg_skeletonization.py | 4 +- test/test_stokes.py | 10 +-- test/test_symbolic.py | 2 +- test/test_tools.py | 3 +- 21 files changed, 99 insertions(+), 86 deletions(-) diff --git a/pytential/linalg/proxy.py b/pytential/linalg/proxy.py index 59822b76d..16d6b9aa9 100644 --- a/pytential/linalg/proxy.py +++ b/pytential/linalg/proxy.py @@ -736,7 +736,8 @@ def prg() -> lp.ExecutorBase: # get nodes inside the boxes istart = tree.box_source_starts[iboxes] iend = istart + tree.box_source_counts_cumul[iboxes] - isources = np.hstack([np.arange(s, e) for s, e in zip(istart, iend)]) + isources = np.hstack([ + np.arange(s, e) for s, e in zip(istart, iend, strict=True)]) nodes = np.vstack([s[isources] for s in tree.sources]) isources = tree.user_source_ids[isources] diff --git a/pytential/qbx/fmm.py b/pytential/qbx/fmm.py index 34fdfa5c9..454705510 100644 --- a/pytential/qbx/fmm.py +++ b/pytential/qbx/fmm.py @@ -371,7 +371,7 @@ def eval_qbx_expansions(self, qbx_expansions): **self.kernel_extra_kwargs.copy()) - for pot_i, pot_res_i in zip(pot, pot_res): + for pot_i, pot_res_i in zip(pot, pot_res, strict=True): assert pot_i is pot_res_i return (pot, SumpyTimingFuture(queue, events)) @@ -585,7 +585,8 @@ def drive_fmm(expansion_wrangler, src_weight_vecs, timing_data=None): all_potentials_in_tree_order = wrangler.full_output_zeros(template_ary) - for ap_i, nqp_i in zip(all_potentials_in_tree_order, non_qbx_potentials): + for ap_i, nqp_i in zip(all_potentials_in_tree_order, non_qbx_potentials, + strict=False): ap_i[nqbtl.unfiltered_from_filtered_target_indices] = nqp_i all_potentials_in_tree_order += qbx_potentials diff --git a/pytential/qbx/geometry.py b/pytential/qbx/geometry.py index 71e08be66..436fe856d 100644 --- a/pytential/qbx/geometry.py +++ b/pytential/qbx/geometry.py @@ -489,7 +489,8 @@ def target_info(self): points=self.flat_centers()) for start, (target_discr, _) in zip( 
- target_discr_starts, self.target_discrs_and_qbx_sides): + target_discr_starts[:-1], + self.target_discrs_and_qbx_sides, strict=True): code_getter.copy_targets_kernel()( actx.queue, targets=targets[:, @@ -513,8 +514,8 @@ def target_side_preferences(self): target_side_preferences = actx.np.zeros(tgt_info.ntargets, dtype=np.int8) for tdstart, (target_discr, qbx_side) in zip( - tgt_info.target_discr_starts, - self.target_discrs_and_qbx_sides): + tgt_info.target_discr_starts[:-1], + self.target_discrs_and_qbx_sides, strict=True): target_side_preferences[tdstart:tdstart+target_discr.ndofs] = qbx_side return actx.freeze(target_side_preferences) @@ -939,12 +940,13 @@ def plot(self, draw_circles=False, draw_center_numbers=False, if draw_circles: for cx, cy, r in zip( centers[0], centers[1], - actx.to_numpy(self.flat_expansion_radii())): + actx.to_numpy(self.flat_expansion_radii()), strict=True): ax.add_artist(pt.Circle((cx, cy), r, fill=False, ls="dotted", lw=1)) if draw_center_numbers: - for icenter, (cx, cy) in enumerate(zip(centers[0], centers[1])): + for icenter, (cx, cy) in enumerate(zip(centers[0], centers[1], + strict=True)): pt.text(cx, cy, str(icenter), fontsize=8, ha="left", va="center", @@ -977,7 +979,7 @@ def plot(self, draw_circles=False, draw_center_numbers=False, for tx, ty, tcenter in zip( targets[0][self.ncenters:], targets[1][self.ncenters:], - ttc[self.ncenters:]): + ttc[self.ncenters:], strict=True): checked += 1 if tcenter >= 0: tccount += 1 diff --git a/pytential/qbx/utils.py b/pytential/qbx/utils.py index 3a51a5e65..13b9701fc 100644 --- a/pytential/qbx/utils.py +++ b/pytential/qbx/utils.py @@ -304,7 +304,7 @@ def _make_centers(discr): queue = actx.queue particles = tuple( actx.np.concatenate(dim_coords) - for dim_coords in zip(sources, centers, *targets)) + for dim_coords in zip(sources, centers, *targets, strict=True)) # Counts nparticles = len(particles[0]) diff --git a/pytential/symbolic/compiler.py b/pytential/symbolic/compiler.py index f5c93db99..8b2172cd7 100644 --- a/pytential/symbolic/compiler.py +++ b/pytential/symbolic/compiler.py @@ -103,7 +103,8 @@ def __str__(self): lines = [] lines.append("{" + comment) - for n, e, dnr in zip(self.names, self.exprs, self.do_not_return): + for n, e, dnr in zip(self.names, self.exprs, self.do_not_return, + strict=True): if dnr: dnr_indicator = "-#" else: @@ -643,7 +644,7 @@ def map_int_g(self, expr, name_hint=None): target_name=op.target, qbx_forced_limit=op.qbx_forced_limit, ) - for name, op in zip(names, group) + for name, op in zip(names, group, strict=True) ] self.code.append( @@ -661,7 +662,7 @@ def map_int_g(self, expr, name_hint=None): priority=max(getattr(op, "priority", 0) for op in group), )) - for name, group_expr in zip(names, group): + for name, group_expr in zip(names, group, strict=True): self.expr_to_var[group_expr] = NamedIntermediateResult(name) return self.expr_to_var[expr] diff --git a/pytential/symbolic/dof_connection.py b/pytential/symbolic/dof_connection.py index 65c5e5c68..5aaf9bfb0 100644 --- a/pytential/symbolic/dof_connection.py +++ b/pytential/symbolic/dof_connection.py @@ -113,7 +113,7 @@ def prg(): "idof": ConcurrentDOFInameTag()}) results = [] - for grp, subary1, subary2 in zip(self.discr.groups, ary1, ary2): + for grp, subary1, subary2 in zip(self.discr.groups, ary1, ary2, strict=True): if subary1.dtype != subary2.dtype: raise ValueError("dtype mismatch in inputs: " f"'{subary1.dtype.name}' and '{subary2.dtype.name}'") diff --git a/pytential/symbolic/execution.py b/pytential/symbolic/execution.py 
index b39c276d0..a0e6e9f17 100644
--- a/pytential/symbolic/execution.py
+++ b/pytential/symbolic/execution.py
@@ -309,7 +309,7 @@ def map_is_shape_class(self, expr):
 
     def exec_assign(self, actx: PyOpenCLArrayContext, insn, bound_expr, evaluate):
         return [(name, evaluate(expr))
-                for name, expr in zip(insn.names, insn.exprs)]
+                for name, expr in zip(insn.names, insn.exprs, strict=True)]
 
     def exec_compute_potential_insn(
             self, actx: PyOpenCLArrayContext, insn, bound_expr, evaluate):
@@ -470,7 +470,7 @@ def flatten(self, ary):
         from arraycontext import flatten
 
         result = self.array_context.zeros(self.total_dofs, self.dtype)
-        for res_i, (start, end) in zip(ary, self.starts_and_ends):
+        for res_i, (start, end) in zip(ary, self.starts_and_ends, strict=True):
             result[start:end] = flatten(res_i, self.array_context)
 
         return result
@@ -478,7 +478,7 @@ def flatten(self, ary):
     def unflatten(self, ary):
         # Convert a flat version of *ary* into a structured version.
         components = []
-        for discr, (start, end) in zip(self.discrs, self.starts_and_ends):
+        for discr, (start, end) in zip(self.discrs, self.starts_and_ends, strict=True):
             component = ary[start:end]
 
             from meshmode.discretization import Discretization
diff --git a/pytential/symbolic/mappers.py b/pytential/symbolic/mappers.py
index ae99a75d4..e533c1f0c 100644
--- a/pytential/symbolic/mappers.py
+++ b/pytential/symbolic/mappers.py
@@ -60,11 +60,11 @@ def rec_int_g_arguments(mapper, expr):
         }
 
     changed = (
-        all(d is orig for d, orig in zip(densities, expr.densities))
+        all(d is orig for d, orig in zip(densities, expr.densities, strict=True))
         and all(
             arg is orig for arg, orig in zip(
                 kernel_arguments.values(),
-                expr.kernel_arguments.values()))
+                expr.kernel_arguments.values(), strict=True))
         )
 
     return densities, kernel_arguments, changed
@@ -510,7 +510,8 @@ def map_constant(self, expr):
 
     def map_sum(self, expr):
         children = [self.rec(child) for child in expr.children]
-        if all(child is orig for child, orig in zip(children, expr.children)):
+        if all(child is orig for child, orig in zip(
+                children, expr.children, strict=True)):
             return expr
 
         from pymbolic.primitives import flattened_sum
@@ -819,7 +820,8 @@ def _stringify_kernel_args(self, kernel_arguments):
     def map_int_g(self, expr, enclosing_prec):
         source_kernels_str = " + ".join([
             "{} * {}".format(self.rec(density, PREC_PRODUCT), source_kernel)
-            for source_kernel, density in zip(expr.source_kernels, expr.densities)
+            for source_kernel, density in zip(
+                expr.source_kernels, expr.densities, strict=True)
             ])
         target_kernel_str = str(expr.target_kernel)
         base_kernel_str = str(expr.target_kernel.get_base_kernel())
diff --git a/pytential/symbolic/matrix.py b/pytential/symbolic/matrix.py
index 60aed6768..985e38c01 100644
--- a/pytential/symbolic/matrix.py
+++ b/pytential/symbolic/matrix.py
@@ -380,7 +380,7 @@ def map_int_g(self, expr):
             assert abs(expr.qbx_forced_limit) > 0
 
         result = 0
-        for kernel, density in zip(expr.source_kernels, expr.densities):
+        for kernel, density in zip(expr.source_kernels, expr.densities, strict=True):
             rec_density = self.rec(density)
             if is_zero(rec_density):
                 continue
@@ -463,7 +463,7 @@ def map_int_g(self, expr):
         target_base_kernel = expr.target_kernel.get_base_kernel()
 
         result = 0
-        for density, kernel in zip(expr.densities, expr.source_kernels):
+        for density, kernel in zip(expr.densities, expr.source_kernels, strict=True):
             rec_density = self.rec(density)
             if is_zero(rec_density):
                 continue
@@ -553,7 +553,7 @@ def map_int_g(self, expr):
         result = 0
         assert abs(expr.qbx_forced_limit) > 0
 
-        for kernel, density in zip(expr.source_kernels, expr.densities):
+        for kernel, density in zip(expr.source_kernels, expr.densities, strict=True):
             rec_density = self._inner_mapper.rec(density)
             if is_zero(rec_density):
                 continue
@@ -640,7 +640,7 @@ def map_int_g(self, expr):
         target_base_kernel = expr.target_kernel.get_base_kernel()
 
         result = 0
-        for kernel, density in zip(expr.source_kernels, expr.densities):
+        for kernel, density in zip(expr.source_kernels, expr.densities, strict=True):
             rec_density = self._inner_mapper.rec(density)
             if is_zero(rec_density):
                 continue
diff --git a/pytential/symbolic/pde/maxwell/__init__.py b/pytential/symbolic/pde/maxwell/__init__.py
index e8ffa9361..7b76c2a6c 100644
--- a/pytential/symbolic/pde/maxwell/__init__.py
+++ b/pytential/symbolic/pde/maxwell/__init__.py
@@ -201,7 +201,7 @@ def __init__(self, omega, mus, epss):
         self.epss = epss
         self.ks = [
             sym.cse(omega*(eps*mu)**0.5, f"k{i}")
-            for i, (eps, mu) in enumerate(zip(epss, mus))]
+            for i, (eps, mu) in enumerate(zip(epss, mus, strict=True))]
 
     def make_unknown(self, name):
         return sym.make_sym_vector(name, 6)
diff --git a/pytential/unregularized.py b/pytential/unregularized.py
index 06b3b0a2d..f171eb004 100644
--- a/pytential/unregularized.py
+++ b/pytential/unregularized.py
@@ -454,7 +454,8 @@ def target_info(self):
                 (lpot_src.ambient_dim, ntargets),
                 self.coord_dtype)
 
-        for start, target_discr in zip(target_discr_starts, target_discrs):
+        for start, target_discr in zip(target_discr_starts[:-1],
+                target_discrs, strict=True):
             code_getter.copy_targets_kernel()(actx.queue,
                     targets=targets[:, start:start+target_discr.ndofs],
                     points=flatten(
diff --git a/pytential/utils.py b/pytential/utils.py
index 99613f249..c28ea659b 100644
--- a/pytential/utils.py
+++ b/pytential/utils.py
@@ -33,7 +33,7 @@ def sort_arrays_together(*arys, key=None):
     :param key: a function that takes in a tuple of values and returns a
         value to compare.
     """
-    return zip(*sorted(zip(*arys), key=key))
+    return zip(*sorted(zip(*arys, strict=True), key=key), strict=True)
 
 
 def pytest_teardown_function():
diff --git a/test/extra_curve_data.py b/test/extra_curve_data.py
index 4d8566e37..be38aec25 100644
--- a/test/extra_curve_data.py
+++ b/test/extra_curve_data.py
@@ -54,6 +54,8 @@ def __init__(self, *objs):
         self.curves = curves
 
     def __call__(self, ts):
+        from itertools import pairwise
+
         ranges = np.linspace(0, 1, len(self.curves) + 1)
         ts_argsort = np.argsort(ts)
         ts_sorted = ts[ts_argsort]
@@ -62,10 +64,9 @@ def __call__(self, ts):
         # trail of 1s, then they won't be forwarded to the last curve.
         ts_split_points[-1] = len(ts)
         result = []
-        subranges = [
-            slice(*pair) for pair in zip(ts_split_points, ts_split_points[1:])]
+        subranges = [slice(*pair) for pair in pairwise(ts_split_points)]
         for curve, subrange, (start, end) in zip(
-                self.curves, subranges, zip(ranges, ranges[1:])):
+                self.curves, subranges, pairwise(ranges), strict=True):
             ts_mapped = (ts_sorted[subrange] - start) / (end - start)
             result.append(curve(ts_mapped))
         final = np.concatenate(result, axis=-1)
diff --git a/test/extra_int_eq_data.py b/test/extra_int_eq_data.py
index 5d9c40260..4cfb7eb12 100644
--- a/test/extra_int_eq_data.py
+++ b/test/extra_int_eq_data.py
@@ -20,8 +20,9 @@
 THE SOFTWARE.
""" +from collections.abc import Callable from dataclasses import dataclass, field -from typing import Any, Callable, Dict, List, Optional, Type, Tuple, Union +from typing import Any import numpy as np @@ -86,41 +87,41 @@ def make_source_and_target_points( @dataclass class IntegralEquationTestCase: - name: Optional[str] = None - ambient_dim: Optional[int] = None + name: str | None = None + ambient_dim: int | None = None # operator - knl_class_or_helmholtz_k: Union[Type[Kernel], float] = 0 - knl_kwargs: Dict[str, Any] = field(default_factory=dict) + knl_class_or_helmholtz_k: type[Kernel] | float = 0 + knl_kwargs: dict[str, Any] = field(default_factory=dict) bc_type: str = "dirichlet" side: int = -1 # qbx - qbx_order: Optional[int] = None + qbx_order: int | None = None source_ovsmp: int = 4 - target_order: Optional[int] = None + target_order: int | None = None use_refinement: bool = True - group_cls: Type[MeshElementGroup] = SimplexElementGroup - group_factory_cls: Type[ElementGroupFactory] = InterpolatoryQuadratureGroupFactory + group_cls: type[MeshElementGroup] = SimplexElementGroup + group_factory_cls: type[ElementGroupFactory] = InterpolatoryQuadratureGroupFactory # fmm - fmm_backend: Optional[str] = "sumpy" - fmm_order: Optional[int] = None - fmm_tol: Optional[float] = None + fmm_backend: str | None = "sumpy" + fmm_order: int | None = None + fmm_tol: float | None = None disable_fft: bool = False # solver gmres_tol: float = 1.0e-14 # test case - resolutions: Optional[List[int]] = None - inner_radius: Optional[float] = None - outer_radius: Optional[float] = None + resolutions: list[int] | None = None + inner_radius: float | None = None + outer_radius: float | None = None check_tangential_deriv: bool = True check_gradient: bool = False - box_extent_norm: Optional[str] = None - from_sep_smaller_crit: Optional[str] = None + box_extent_norm: str | None = None + from_sep_smaller_crit: str | None = None # {{{ symbolic @@ -259,13 +260,13 @@ class CurveTestCase(IntegralEquationTestCase): target_order: int = 5 # fmm - fmm_backend: Optional[str] = None + fmm_backend: str | None = None # test case - curve_fn: Optional[Callable[[np.ndarray], np.ndarray]] = None + curve_fn: Callable[[np.ndarray], np.ndarray] | None = None inner_radius: float = 0.1 outer_radius: float = 2 - resolutions: List[int] = field(default_factory=lambda: [40, 50, 60]) + resolutions: list[int] = field(default_factory=lambda: [40, 50, 60]) def _curve_fn(self, t): return self.curve_fn(t) @@ -299,7 +300,7 @@ class CircleTestCase(EllipseTestCase): @dataclass class WobbleCircleTestCase(CurveTestCase): name: str = "wobble-circle" - resolutions: List[int] = field(default_factory=lambda: [2000, 3000, 4000]) + resolutions: list[int] = field(default_factory=lambda: [2000, 3000, 4000]) def _curve_fn(self, t): from meshmode.mesh.generation import WobblyCircle @@ -316,7 +317,7 @@ class StarfishTestCase(CurveTestCase): inner_radius: float = 0.25 outer_radius: float = 2.0 - resolutions: List[int] = field(default_factory=lambda: [30, 50, 70, 90]) + resolutions: list[int] = field(default_factory=lambda: [30, 50, 70, 90]) def _curve_fn(self, t): from meshmode.mesh.generation import NArmedStarfish @@ -335,7 +336,7 @@ class Helmholtz3DTestCase(IntegralEquationTestCase): use_refinement: bool = False # fmm - fmm_backend: Optional[str] = "fmmlib" + fmm_backend: str | None = "fmmlib" # solver gmres_tol: float = 1.0e-7 @@ -353,7 +354,7 @@ class HelmholtzEllisoidTestCase(Helmholtz3DTestCase): fmm_order: int = 13 # test case - resolutions: List[int] = 
field( + resolutions: list[int] = field( default_factory=lambda: [2.0, 0.8]) # type: ignore[list-item] inner_radius: float = 0.4 outer_radius: float = 5.0 @@ -383,14 +384,14 @@ class SphereTestCase(IntegralEquationTestCase): use_refinement: bool = False # fmm - fmm_backend: Optional[str] = "fmmlib" + fmm_backend: str | None = "fmmlib" fmm_tol: float = 1.0e-4 # solver gmres_tol: float = 1.0e-7 # test case - resolutions: List[int] = field(default_factory=lambda: [1, 2]) + resolutions: list[int] = field(default_factory=lambda: [1, 2]) check_gradient: bool = False check_tangential_deriv: bool = False @@ -443,7 +444,7 @@ class GMSHSphereTestCase(SphereTestCase): name: str = "gmsphere" radius: float = 1.5 - resolutions: List[int] = field( + resolutions: list[int] = field( default_factory=lambda: [0.4]) # type: ignore[list-item] def get_mesh(self, resolution, mesh_order): @@ -500,7 +501,7 @@ class TorusTestCase(IntegralEquationTestCase): r_minor: float = 2.0 # test case - resolutions: List[int] = field(default_factory=lambda: [0, 1, 2]) + resolutions: list[int] = field(default_factory=lambda: [0, 1, 2]) def get_mesh(self, resolution, mesh_order): from meshmode.mesh.generation import generate_torus @@ -519,7 +520,7 @@ class MergedCubesTestCase(Helmholtz3DTestCase): use_refinement: bool = True # test case - resolutions: List[int] = field( + resolutions: list[int] = field( default_factory=lambda: [1.4]) # type: ignore[list-item] inner_radius: float = 0.4 outer_radius: float = 12.0 @@ -542,7 +543,7 @@ class ManyEllipsoidTestCase(Helmholtz3DTestCase): name: str = "ellipsoid" # test case - resolutions: List[int] = field(default_factory=lambda: [2, 1]) + resolutions: list[int] = field(default_factory=lambda: [2, 1]) inner_radius: float = 0.4 outer_radius: float = 5.0 @@ -596,11 +597,11 @@ class EllipticPlaneTestCase(IntegralEquationTestCase): use_refinement: bool = True # fmm - fmm_backend: Optional[str] = "fmmlib" + fmm_backend: str | None = "fmmlib" fmm_tol: float = 1.0e-4 # test case - resolutions: List[int] = field( + resolutions: list[int] = field( default_factory=lambda: [0.1]) # type: ignore[list-item] inner_radius: float = 0.2 outer_radius: float = 12 # was '-13' in some large-scale run (?) 
@@ -645,12 +646,12 @@ class BetterPlaneTestCase(IntegralEquationTestCase):
     target_order: int = 6
 
     # fmm
-    fmm_backend: Optional[str] = "fmmlib"
+    fmm_backend: str | None = "fmmlib"
     fmm_tol: float = 1.0e-4
     use_refinement: bool = True
 
     # test case
-    resolutions: List[int] = field(
+    resolutions: list[int] = field(
         default_factory=lambda: [0.2])  # type: ignore[list-item]
     inner_radius: float = 0.2
     outer_radius: float = 15
@@ -662,7 +663,7 @@ class BetterPlaneTestCase(IntegralEquationTestCase):
 
     # other stuff
     visualize_geometry: bool = True
-    vis_grid_spacing: Tuple[float, float, float] = field(
+    vis_grid_spacing: tuple[float, float, float] = field(
         default_factory=lambda: (0.025, 0.2, 0.025))
     vis_extend_factor: float = 0.2
diff --git a/test/extra_matrix_data.py b/test/extra_matrix_data.py
index f8b2c990d..7c2b8e5a6 100644
--- a/test/extra_matrix_data.py
+++ b/test/extra_matrix_data.py
@@ -1,5 +1,6 @@
+from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Any, Callable, Optional
+from typing import Any
 
 import numpy as np
 
@@ -22,26 +23,26 @@ class MatrixTestCaseMixin:
     # operators
     op_type: str = "scalar"
     # disable fmm for matrix tests
-    fmm_backend: Optional[str] = None
+    fmm_backend: str | None = None
 
     # partitioning
     approx_cluster_count: int = 10
-    max_particles_in_box: Optional[int] = None
+    max_particles_in_box: int | None = None
    tree_kind: str = "adaptive-level-restricted"
     index_sparsity_factor: float = 1.0
 
     # proxy
     proxy_radius_factor: float = 1.0
-    proxy_approx_count: Optional[float] = None
+    proxy_approx_count: float | None = None
 
     # skeletonization
     id_eps: float = 1.0e-8
     skel_discr_stage: DiscretizationStages = sym.QBX_SOURCE_STAGE2
-    weighted_proxy: Optional[bool] = None
-    proxy_source_cluster_builder: Optional[Callable[..., Any]] = None
-    proxy_target_cluster_builder: Optional[Callable[..., Any]] = None
-    neighbor_cluster_builder: Optional[Callable[..., Any]] = None
+    weighted_proxy: bool | None = None
+    proxy_source_cluster_builder: Callable[..., Any] | None = None
+    proxy_target_cluster_builder: Callable[..., Any] | None = None
+    neighbor_cluster_builder: Callable[..., Any] | None = None
 
     def get_cluster_index(self, actx, places, dofdesc=None):
         if dofdesc is None:
diff --git a/test/test_beltrami.py b/test/test_beltrami.py
index e61a98001..e5c5c8c02 100644
--- a/test/test_beltrami.py
+++ b/test/test_beltrami.py
@@ -77,7 +77,7 @@ def evaluate_sphere_eigf(actx, discr, m: int, n: int) -> DOFArray:
     from scipy.special import sph_harm  # pylint: disable=no-name-in-module
 
     y_mn = []
-    for gtheta, gphi in zip(theta, phi):
+    for gtheta, gphi in zip(theta, phi, strict=True):
         result = sph_harm(m, n, actx.to_numpy(gphi), actx.to_numpy(gtheta))
         y_mn.append(actx.from_numpy(result.real.copy()))
 
diff --git a/test/test_global_qbx.py b/test/test_global_qbx.py
index 5ebdd4554..9453bfd5d 100644
--- a/test/test_global_qbx.py
+++ b/test/test_global_qbx.py
@@ -394,7 +394,8 @@ def targets_from_sources(sign, dist, dim=2):
             vol_int_slice,
             vol_ext_slice,
             far_slice,
-            ) = (slice(start, end) for start, end in zip(np.r_[0, sizes], sizes))
+            ) = (slice(start, end)
+                 for start, end in zip(np.r_[0, sizes[:-1]], sizes, strict=True))
 
     # }}}
 
@@ -438,7 +439,7 @@ def visualize_curve_and_assoc():
         for tx, ty, tcenter in zip(
                 targets[0, tgt_slice],
                 targets[1, tgt_slice],
-                target_assoc.target_to_center[tgt_slice]):
+                target_assoc.target_to_center[tgt_slice], strict=True):
             if tcenter >= 0:
                 ax.add_artist(
                     plt.Line2D(
diff --git a/test/test_linalg_skeletonization.py b/test/test_linalg_skeletonization.py
index ca0f12d83..76191c390 100644
--- a/test/test_linalg_skeletonization.py
+++ b/test/test_linalg_skeletonization.py
@@ -20,9 +20,9 @@
 THE SOFTWARE.
 """
 
+from collections.abc import Sequence
 from dataclasses import replace
 from functools import partial
-from typing import Sequence, Union
 
 import pytest
 import numpy as np
@@ -48,7 +48,7 @@
         PytestPyOpenCLArrayContextFactory,
         ])
 
-SKELETONIZE_TEST_CASES: Sequence[Union[extra.CurveTestCase, extra.TorusTestCase]] = [
+SKELETONIZE_TEST_CASES: Sequence[extra.CurveTestCase | extra.TorusTestCase] = [
         extra.CurveTestCase(
             name="ellipse",
             op_type="scalar",
diff --git a/test/test_stokes.py b/test/test_stokes.py
index dbd7f5a09..636e4d0b1 100644
--- a/test/test_stokes.py
+++ b/test/test_stokes.py
@@ -237,7 +237,7 @@ def run_exterior_stokes(actx_factory, *,
     v_error = [0.0] * ambient_dim
     v_error[:ambient_dim] = [
         dof_array_rel_error(actx, u, uref)
-        for u, uref in zip(velocity, ref_velocity)]
+        for u, uref in zip(velocity, ref_velocity, strict=True)]
     h_max = actx.to_numpy(
         bind(places, sym.h_max(ambient_dim))(actx)
         )
@@ -300,7 +300,7 @@ def test_exterior_stokes(actx_factory, ambient_dim, visualize=False):
             resolution=resolution,
             visualize=visualize)
 
-        for eoc, e in zip(eocs, errors):
+        for eoc, e in zip(eocs, errors, strict=True):
             eoc.add_data_point(h_max, e)
 
     for eoc in eocs:
@@ -345,7 +345,7 @@ def run_stokes_identity(actx_factory, case, identity, resolution, visualize=Fals
     error = [0.0] * places.ambient_dim
     error[:places.ambient_dim] = [
         discr_rel_error(actx, density_discr, x, xref, p=np.inf)
-        for x, xref in zip(result, ref_result)]
+        for x, xref in zip(result, ref_result, strict=True)]
 
     logger.info("resolution %4d h_min %.5e h_max %.5e error %.5e %.5e %.5e",
             resolution, h_min, h_max, *error)
@@ -443,7 +443,7 @@ def test_stokeslet_identity(actx_factory, cls, visualize=False):
             resolution=resolution,
             visualize=visualize)
 
-        for eoc, e in zip(eocs, errors):
+        for eoc, e in zip(eocs, errors, strict=True):
             eoc.add_data_point(h_max, e)
 
     for eoc in eocs:
@@ -502,7 +502,7 @@ def test_stresslet_identity(actx_factory, cls, visualize=False):
             resolution=resolution,
             visualize=visualize)
 
-        for eoc, e in zip(eocs, errors):
+        for eoc, e in zip(eocs, errors, strict=True):
             eoc.add_data_point(h_max, e)
 
     for eoc in eocs:
diff --git a/test/test_symbolic.py b/test/test_symbolic.py
index 732286cf4..9abd50c13 100644
--- a/test/test_symbolic.py
+++ b/test/test_symbolic.py
@@ -322,7 +322,7 @@ def randrange_like(xi, offset):
     base_node_nrs = np.cumsum([0] + [grp.ndofs for grp in discr.groups])
     ary = DOFArray(actx, data=tuple(
         randrange_like(xi, offset)
-        for xi, offset in zip(discr.nodes()[0], base_node_nrs)
+        for xi, offset in zip(discr.nodes()[0], base_node_nrs[:-1], strict=True)
         ))
 
     n = discr.ndofs
diff --git a/test/test_tools.py b/test/test_tools.py
index 360313188..db1d799da 100644
--- a/test/test_tools.py
+++ b/test/test_tools.py
@@ -158,7 +158,8 @@ def test_geometry_collection_caching(actx_factory):
     # construct a geometry collection
     from pytential import GeometryCollection
 
-    places = GeometryCollection(dict(zip(sources, lpots)), auto_where=sources[0])
+    places = GeometryCollection(dict(zip(sources, lpots, strict=True)),
+            auto_where=sources[0])
     logger.info("%s", places.places)
 
     # check on-demand refinement

From a2334fb962c5e727a7b0c7ec6edf6e5e983aa8ee Mon Sep 17 00:00:00 2001
From: Alexandru Fikl
Date: Sat, 26 Oct 2024 15:37:35 +0300
Subject: [PATCH 4/5] mypy: fix some new errors

---
 pytential/linalg/gmres.py      |  2 +-
 pytential/symbolic/compiler.py | 10 +++++-----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/pytential/linalg/gmres.py b/pytential/linalg/gmres.py
index fa367fb03..d2345469a 100644
--- a/pytential/linalg/gmres.py
+++ b/pytential/linalg/gmres.py
@@ -193,7 +193,7 @@ def norm(x):
 
         if (stall_iterations
                 and len(residual_norms) > stall_iterations
                 and norm_r > (
-                    residual_norms[-stall_iterations]  # pylint:disable=invalid-unary-operand-type
+                    residual_norms[-stall_iterations]
                     / no_progress_factor)):
             state = "stalled"
diff --git a/pytential/symbolic/compiler.py b/pytential/symbolic/compiler.py
index 8b2172cd7..f303b5741 100644
--- a/pytential/symbolic/compiler.py
+++ b/pytential/symbolic/compiler.py
@@ -20,7 +20,7 @@
 THE SOFTWARE.
 """
 
-from collections.abc import AbstractSet, Collection, Iterator, Hashable, Sequence
+from collections.abc import Collection, Iterator, Hashable, Sequence, Set
 from dataclasses import dataclass
 from functools import reduce
 from typing import Any
@@ -190,7 +190,7 @@ def get_assignees(self):
         return {o.name for o in self.outputs}
 
     def get_dependencies(self, dep_mapper: DependencyMapper) -> set[Expression]:
-        result = dep_mapper(self.densities[0])
+        result = set(dep_mapper(self.densities[0]))
         for density in self.densities[1:]:
             result.update(dep_mapper(density))
 
@@ -326,7 +326,7 @@ def gen_expr_arrow(expr, target_node):
 class Code:
     def __init__(
             self,
-            inputs: AbstractSet[str],
+            inputs: Set[str],
             schedule: Sequence[tuple[Statement, Collection[str]]],
             result: np.ndarray,
             ) -> None:
@@ -359,8 +359,8 @@ def _get_next_step(
         dep_mapper: DependencyMapper,
         statements: Sequence[Statement],
         result: np.ndarray,
-        available_names: AbstractSet[str],
-        done_stmts: AbstractSet[Statement]
+        available_names: Set[str],
+        done_stmts: Set[Statement]
         ) -> tuple[Statement, set[str]]:
     from pytools import argmax2
 

From 3d736d5a5c3a2940ba0e0407c47f60a48b1d40be Mon Sep 17 00:00:00 2001
From: Alexandru Fikl
Date: Sat, 26 Oct 2024 16:14:15 +0300
Subject: [PATCH 5/5] pylint: force min version

---
 .pylintrc-local.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.pylintrc-local.yml b/.pylintrc-local.yml
index c941e862b..57d5fdaf2 100644
--- a/.pylintrc-local.yml
+++ b/.pylintrc-local.yml
@@ -1,3 +1,6 @@
+- arg: py-version
+  val: '3.10'
+
 - arg: ignore
   val:
   - old_diffop_primitives.py
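
Note on the `strict=True` changes in PATCH 3/5: on Python 3.10+,
`zip(..., strict=True)` raises ValueError on a length mismatch instead of
silently truncating, which is what surfaced the off-by-one pairings fixed
above (`target_discr_starts[:-1]`, `base_node_nrs[:-1]`,
`np.r_[0, sizes[:-1]]`). A minimal sketch of both behaviors; the names are
illustrative, not taken from pytential:

    from itertools import pairwise

    starts = [0, 3, 7]     # cumsum-style offsets: one more entry than groups
    groups = ["g0", "g1"]

    # Plain zip() stops at the shorter iterable and hides the mismatch.
    assert list(zip(starts, groups)) == [(0, "g0"), (3, "g1")]

    # strict=True turns the mismatch into an error...
    try:
        list(zip(starts, groups, strict=True))
    except ValueError:
        pass  # "zip() argument 2 is shorter than argument 1"

    # ...so the fixes drop the extra trailing offset explicitly.
    assert list(zip(starts[:-1], groups, strict=True)) == [(0, "g0"), (3, "g1")]

    # itertools.pairwise (3.10+) replaces the zip(xs, xs[1:]) idiom, as in
    # the extra_curve_data.py hunk above.
    assert list(pairwise(starts)) == [(0, 3), (3, 7)]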
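The typing changes follow the same 3.10 baseline: PEP 604 unions (`X | None`)
replace `Optional`/`Union`, builtin generics (`list[int]`, `dict[str, Any]`)
replace their `typing` aliases, and `collections.abc.Set` stands in for the
deprecated `typing.AbstractSet` alias in PATCH 4/5. A small sketch of the
pattern; the function below is illustrative and not from the patches:

    from collections.abc import Callable, Set

    # PEP 604 unions and builtin generics work at runtime on 3.10+ without
    # any typing imports.
    def solve(rtol: float | None = None,
              callback: Callable[[int], None] | None = None) -> list[float]:
        return [0.0]

    # collections.abc.Set also supports runtime isinstance() checks.
    assert isinstance(frozenset(), Set)
    assert not isinstance([], Set)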