From d5849c0a7af8d159d441e4a295cf8ec76264310e Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Tue, 24 Mar 2026 22:37:42 -0400 Subject: [PATCH 1/9] refactor: remove pyshtools and use ducc0 natively for SHT operations --- README.md | 9 +- docs/changelog.md | 4 +- docs/hodges.md | 6 +- pyproject.toml | 4 +- src/pystormtracker/cli.py | 10 + src/pystormtracker/hodges/tracker.py | 14 +- src/pystormtracker/models/tracker.py | 1 + .../preprocessing/kinematics.py | 127 ++++------- src/pystormtracker/preprocessing/spectral.py | 101 ++++----- src/pystormtracker/simple/concurrent.py | 16 +- src/pystormtracker/simple/tracker.py | 20 +- tests/test_cli.py | 3 + tests/test_integration_spectral.py | 3 +- tests/test_integration_vorticity.py | 16 +- uv.lock | 210 +----------------- 15 files changed, 171 insertions(+), 373 deletions(-) diff --git a/README.md b/README.md index 60e55b12..f7bcad67 100644 --- a/README.md +++ b/README.md @@ -59,10 +59,9 @@ Full documentation, including API references and advanced usage examples, is ava - Python 3.11+ - (Optional) OpenMPI for MPI support. - **SHT Backends**: Supported engines for filtering and derivatives: - - `ducc0`: **Core dependency**. High-precision C++ backend. Default for derivatives. - - `pyshtools`: **Core dependency**. Includes Fortran backend. - - `shtns`: (Optional) High-performance C library (`pip install PyStormTracker[pre]`). Recommended for large datasets. -- **Windows**: GRIB support is experimental. SHTns is not supported on Windows; `ducc0` or `shtools` will be used automatically. + - `ducc0`: **Core dependency**. High-precision C++ library (Distinctly Useful Code Collection) providing high performance spherical harmonic transforms. + - `shtns`: (Optional) High-performance C library (`pip install PyStormTracker[shtns]`). Recommended for large datasets. +- **Windows**: GRIB support is experimental. SHTns is not supported on Windows; `ducc0` will be used automatically. 
### From PyPI You can install the latest stable version of PyStormTracker directly from PyPI: @@ -135,7 +134,7 @@ stormtracker -i data.nc -v msl -o my_tracks.txt | `--threshold` | `-t` | Intensity threshold for feature detection. | | `--filter-range` | | Spectral filter range (min-max). Default '5-42'. | | `--no-filter` | | Disable default T5-42 spectral filtering. | -| `--engine` | | SHT backend: `auto`, `shtns`, `ducc0`, `shtools`. | +| `--sht-engine` | | SHT backend: `auto`, `shtns`, `ducc0`. | | `--num` | `-n` | Number of time steps to process. | | **Performance** | | | | `--backend` | `-b` | `serial`, `dask`, or `mpi`. Auto-detected by default. | diff --git a/docs/changelog.md b/docs/changelog.md index f1823eb7..a5bc3633 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -2,7 +2,7 @@ ## v0.5.0.dev ### Features -- **Spectral Backends**: Integrated support for `shtns`, `ducc0`, and `shtools` for all spherical harmonic operations. +- **Spectral Backends**: Integrated support for `shtns` and `ducc0` for all spherical harmonic operations. - **High-Precision Derivatives**: New `vodv` module for computing relative vorticity and divergence using spin-1 vector harmonics. - **Planetary Constants**: Standardized Earth radius to 6,371,220 m across derivatives and tracking geometry. - **pst-convert & JSON Support**: New utility for format conversion and a GPU-optimized JSON format for track data. @@ -15,7 +15,7 @@ ### Performance - **SHTns Optimization**: Optimized thread handling for spherical harmonic transforms. -- **Backend Fallback**: Implemented `pyshtools` as the default backend for spherical harmonic filtering, with `shtns` as an optional, high-performance alternative. +- **Backend Fallback**: Implemented native `ducc0` usage for spherical harmonic filtering and transformations, removing the previous wrapper dependency. `shtns` remains an optional alternative. 
- **ARM64 Support**: Restored full support for **ARM64** architectures in Docker and CI by making `shtns` an optional dependency. ### CI/CD & Testing diff --git a/docs/hodges.md b/docs/hodges.md index 155b9a3c..cffc0442 100644 --- a/docs/hodges.md +++ b/docs/hodges.md @@ -11,12 +11,12 @@ This document details the architecture, mathematical implementation, and design ## 1. Feature Identification Design ### 1.1 Preprocessing (Spectral Filtering & Derivatives) -**Design Choice**: Integrated native spherical harmonic filtering (e.g., T42 truncation) and high-precision derivative calculation using the `shtns`, `ducc0`, or `shtools` backends. +**Design Choice**: Integrated native spherical harmonic filtering (e.g., T42 truncation) and high-precision derivative calculation using the `shtns` or `ducc0` backends. - **References**: `spec_filt.c`, `uv2vr.c`. -- **CLI Options**: `--filter-range` (e.g., `5-42`), `--no-filter`, `--engine` (auto/shtns/ducc0/shtools), and `--taper`. +- **CLI Options**: `--filter-range` (e.g., `5-42`), `--no-filter`, `--sht-engine` (auto/shtns/ducc0), and `--taper`. **Reasoning**: -Original TRACK workflows typically require offline spectral filtering to remove the planetary background and high-frequency noise. `PyStormTracker` incorporates this directly into its preprocessing module for on-the-fly execution. 
By default, the Hodges algorithm applies a T5-42 band-pass filter unless `--no-filter` is specified. The system also supports high-precision **Relative Vorticity** and **Divergence** calculation from wind components using spin-1 vector harmonics, ensuring bit-wise parity with NCL/Spherepack when using the `ducc0` backend. ### 1.2 Object-Based Detection **Design Choice**: Feature detection is implemented as a multi-stage pipeline: `Thresholding -> Connected Component Labeling (CCL) -> Object Filtering -> Local Extrema`. diff --git a/pyproject.toml b/pyproject.toml index 2315f0d2..233a18f3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ "h5netcdf[h5py]>=1.0.0", "numpy>=1.24.0", "numba>=0.60.0", - "pyshtools[ducc]>=4.10.4", + "ducc0>=0.33.0", "xarray>=2024.9.0", ] @@ -157,4 +157,4 @@ exclude_lines = [ [tool.uv] link-mode = "copy" -no-binary-package = ["shtns"] +no-binary-package = ["ducc0", "shtns"] diff --git a/src/pystormtracker/cli.py b/src/pystormtracker/cli.py index f05bc2aa..5b7796d1 100644 --- a/src/pystormtracker/cli.py +++ b/src/pystormtracker/cli.py @@ -57,6 +57,7 @@ def run_tracker( lmax: int = constants.LMAX_DEFAULT, taper_points: int = constants.TAPER_DEFAULT, overlap: int = model_constants.OVERLAP_DEFAULT, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", ) -> None: """Orchestrates the storm tracking process from the CLI.""" timer: dict[str, float] = {} @@ -152,6 +153,7 @@ def run_tracker( lmax=lmax, taper_points=taper_points, overlap=overlap, + sht_engine=sht_engine, ) # Export Phase @@ -238,6 +240,13 @@ def parse_args() -> Namespace: ) # Default is determined in main() based on algorithm + general.add_argument( + "--sht-engine", + choices=["auto", "shtns", "ducc0"], + default="auto", + help="SHT backend for filtering and derivatives. Default 'auto'.", + ) + general.add_argument( "-n", "--num", type=int, help="Number of time steps to process." 
) @@ -455,6 +464,7 @@ def main() -> None: lmax=lmax, taper_points=args.taper, overlap=args.overlap, + sht_engine=args.sht_engine, ) diff --git a/src/pystormtracker/hodges/tracker.py b/src/pystormtracker/hodges/tracker.py index c7376e27..88406fa7 100644 --- a/src/pystormtracker/hodges/tracker.py +++ b/src/pystormtracker/hodges/tracker.py @@ -80,6 +80,7 @@ def preprocess_standard_track( lmin: int = constants.LMIN_DEFAULT, lmax: int = constants.LMAX_DEFAULT, taper_points: int = constants.TAPER_DEFAULT, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", ) -> xr.DataArray: """ Applies standard TRACK preprocessing: Tapering -> Spherical Harmonic Filter. @@ -94,7 +95,7 @@ def preprocess_standard_track( data = cast(xr.DataArray, taper.filter(data)) # 2. Spectral Filtering - spectral_filter = SpectralFilter(lmin=lmin, lmax=lmax) + spectral_filter = SpectralFilter(lmin=lmin, lmax=lmax, sht_engine=sht_engine) data = spectral_filter.filter(data) return data @@ -161,6 +162,7 @@ def track( lmin: int = constants.LMIN_DEFAULT, lmax: int = constants.LMAX_DEFAULT, taper_points: int = constants.TAPER_DEFAULT, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", **kwargs: float | int | str | None, ) -> Tracks: """ @@ -181,6 +183,8 @@ def track( min_points: Minimum grid points per object. filter: If True, apply spectral filtering. lmin, lmax: Spectral truncation range (default T5-42). + taper_points: Boundary tapering points. + sht_engine: SHT backend engine. 
""" import timeit @@ -199,7 +203,13 @@ def track( data_xr = detector_peek.get_xarray(start_time, end_time) if filter: - data_xr = self.preprocess_standard_track(data_xr, lmin=lmin, lmax=lmax) + data_xr = self.preprocess_standard_track( + data_xr, + lmin=lmin, + lmax=lmax, + taper_points=taper_points, + sht_engine=sht_engine, + ) t1 = timeit.default_timer() print(f" [Serial] Preprocessing time: {t1 - t0:.4f}s") diff --git a/src/pystormtracker/models/tracker.py b/src/pystormtracker/models/tracker.py index 4462a385..d30c8b36 100644 --- a/src/pystormtracker/models/tracker.py +++ b/src/pystormtracker/models/tracker.py @@ -41,5 +41,6 @@ def track( lmin: int = 5, lmax: int = 42, taper_points: int = 0, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", **kwargs: float | int | str | None, ) -> Tracks: ... diff --git a/src/pystormtracker/preprocessing/kinematics.py b/src/pystormtracker/preprocessing/kinematics.py index ec50bb5e..5cdaa983 100644 --- a/src/pystormtracker/preprocessing/kinematics.py +++ b/src/pystormtracker/preprocessing/kinematics.py @@ -19,8 +19,8 @@ class KinematicsKwargs(TypedDict, total=False): def _resolve_engine( - sht_engine: Literal["auto", "shtns", "ducc0", "shtools"], -) -> Literal["shtns", "ducc0", "shtools"]: + sht_engine: Literal["auto", "shtns", "ducc0"], +) -> Literal["shtns", "ducc0"]: """Resolves 'auto' engine to the best available backend.""" if sht_engine == "auto": return "shtns" if SHTNS_AVAILABLE else "ducc0" @@ -49,8 +49,6 @@ def _get_shtns_plan(nlat: int, nlon: int, lmax: int) -> shtns.sht: _thread_local.cache = {} key = (nlat, nlon, lmax) if key not in _thread_local.cache: - # grid_lmax = (nlat - 1) // 2 - # actual_lmax = min(lmax, grid_lmax) # SHTns can handle lmax beyond sampling theorem for analysis, # but results might be aliased if not careful. # NCL/Spherepack T42 uses lmax=42 for 73x144. 
@@ -68,7 +66,7 @@ def compute_vort_div( lmax: int | None = None, geometry: str = "CC", nthreads: int = 0, - sht_engine: Literal["auto", "shtns", "ducc0", "shtools"] = "auto", + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", ) -> tuple[NDArray[np.float64], NDArray[np.float64]]: """ Computes spatial divergence and relative vorticity from u and v wind components. @@ -78,9 +76,9 @@ def compute_vort_div( v: Meridional wind (ntheta, nphi). R: Planetary radius in meters. Default is R_EARTH_METERS. lmax: Maximum spherical harmonic degree. If None, derived from ntheta. - geometry: Grid geometry (for ducc0/shtools). Default 'CC'. + geometry: Grid geometry (for ducc0). Default 'CC'. nthreads: Number of threads (for ducc0). - sht_engine: Transform engine ('auto', 'shtns', 'ducc0', 'shtools'). + sht_engine: Transform engine ('auto', 'shtns', 'ducc0'). Returns: div: Divergence (ntheta, nphi) @@ -129,10 +127,10 @@ def compute_vort_div( vort = sh.synth(vort_lm) return div, vort - elif resolved_engine in ("ducc0", "shtools"): - import pyshtools as pysh # type: ignore[import-untyped] + elif resolved_engine == "ducc0": + if not DUCC0_AVAILABLE: + raise ImportError("ducc0 is requested but not available.") - backend = "ducc" if resolved_engine == "ducc0" else "shtools" mmax = min(lmax, (nphi - 1) // 2) # Standard spectral derivation from wind components: @@ -144,55 +142,18 @@ def compute_vort_div( # vort_lm = sqrt(l(l+1))/R * B_lm # Analysis - if backend == "ducc" and DUCC0_AVAILABLE: - # v_theta = v, v_phi = u (matches NCL Spherepack convention) - vec_map = np.stack((v, u), axis=0).astype(np.float64) - alm_vec = ducc0.sht.experimental.analysis_2d( - map=vec_map, - spin=1, - lmax=lmax, - mmax=mmax, - geometry=geometry, - nthreads=nthreads, - ) - alm_E = alm_vec[0] - alm_B = alm_vec[1] - else: - # Manual E/B (Spheroidal/Toroidal) decomposition using Fortran SHTOOLS - # SHExpandVDH expects (v_theta, v_phi). v_theta is southward wind. 
- try: - cilm_s, cilm_t = pysh.backends.shtools.SHExpandVDH(-v, u, lmax=lmax) - - # Spectral Scaling - # div = -l(l+1)/R * s, vort = -l(l+1)/R * t - div_lm_raw = np.zeros_like(cilm_s) - vort_lm_raw = np.zeros_like(cilm_t) - for l_val in range(lmax + 1): - scale = -float(l_val * (l_val + 1)) / R - div_lm_raw[:, l_val, : l_val + 1] = ( - cilm_s[:, l_val, : l_val + 1] * scale - ) - vort_lm_raw[:, l_val, : l_val + 1] = ( - cilm_t[:, l_val, : l_val + 1] * scale - ) - - # Synthesis - div = pysh.backends.shtools.MakeGridDH(div_lm_raw, sampling=2) - vort = pysh.backends.shtools.MakeGridDH(vort_lm_raw, sampling=2) - - # Handle potential shape mismatch (DH vs CC) - if div.shape != (ntheta, nphi): - # Fallback to ducc0 if shapes are weird - return compute_vort_div( - u, v, R, lmax, geometry, nthreads, sht_engine="ducc0" - ) - - return div, vort - except Exception: - # Fallback to ducc0 if Fortran path fails - return compute_vort_div( - u, v, R, lmax, geometry, nthreads, sht_engine="ducc0" - ) + # v_theta = v, v_phi = u (matches NCL Spherepack convention) + vec_map = np.stack((v, u), axis=0).astype(np.float64) + alm_vec = ducc0.sht.analysis_2d( + map=vec_map, + spin=1, + lmax=lmax, + mmax=mmax, + geometry=geometry, + nthreads=nthreads, + ) + alm_E = alm_vec[0] + alm_B = alm_vec[1] # Spectral Scaling l_arr = np.concatenate([np.arange(m, lmax + 1) for m in range(mmax + 1)]) @@ -201,30 +162,26 @@ def compute_vort_div( alm_vort = eigen_scale * alm_B # Synthesis - if backend == "ducc" and DUCC0_AVAILABLE: - div = ducc0.sht.experimental.synthesis_2d( - alm=np.expand_dims(alm_div, axis=0), - spin=0, - lmax=lmax, - mmax=mmax, - ntheta=ntheta, - nphi=nphi, - geometry=geometry, - nthreads=nthreads, - )[0] - vort = ducc0.sht.experimental.synthesis_2d( - alm=np.expand_dims(alm_vort, axis=0), - spin=0, - lmax=lmax, - mmax=mmax, - ntheta=ntheta, - nphi=nphi, - geometry=geometry, - nthreads=nthreads, - )[0] - else: - # This part shouldn't really be reached if we have ducc0 - raise 
NotImplementedError("Pure shtools vector synthesis not implemented.") + div = ducc0.sht.synthesis_2d( + alm=np.expand_dims(alm_div, axis=0), + spin=0, + lmax=lmax, + mmax=mmax, + ntheta=ntheta, + nphi=nphi, + geometry=geometry, + nthreads=nthreads, + )[0] + vort = ducc0.sht.synthesis_2d( + alm=np.expand_dims(alm_vort, axis=0), + spin=0, + lmax=lmax, + mmax=mmax, + ntheta=ntheta, + nphi=nphi, + geometry=geometry, + nthreads=nthreads, + )[0] return div, vort @@ -238,7 +195,7 @@ def apply_vort_div( lmax: int | None = None, geometry: str = "CC", nthreads: int = 0, - sht_engine: Literal["auto", "shtns", "ducc0", "shtools"] = "auto", + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", backend: Literal["serial", "mpi", "dask"] = "serial", ) -> tuple[xr.DataArray, xr.DataArray]: """ @@ -302,7 +259,7 @@ def __init__( R: float = R_EARTH_METERS, lmax: int | None = None, geometry: str = "CC", - sht_engine: Literal["auto", "shtns", "ducc0", "shtools"] = "auto", + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", ) -> None: """ Initialize the kinematics calculator. 
diff --git a/src/pystormtracker/preprocessing/spectral.py b/src/pystormtracker/preprocessing/spectral.py index afbabee5..9591a66f 100644 --- a/src/pystormtracker/preprocessing/spectral.py +++ b/src/pystormtracker/preprocessing/spectral.py @@ -28,8 +28,8 @@ class FilterKwargs(TypedDict, total=False): def _resolve_engine( - sht_engine: Literal["auto", "shtns", "ducc0", "shtools"], -) -> Literal["shtns", "ducc0", "shtools"]: + sht_engine: Literal["auto", "shtns", "ducc0"], +) -> Literal["shtns", "ducc0"]: """Resolves 'auto' engine to the best available backend.""" if sht_engine == "auto": return "shtns" if SHTNS_AVAILABLE else "ducc0" @@ -37,7 +37,7 @@ def _resolve_engine( def _get_filter_config( - sht_engine: Literal["auto", "shtns", "ducc0", "shtools"], + sht_engine: Literal["auto", "shtns", "ducc0"], lmin: int, lmax: int, lat_reverse: bool, @@ -56,13 +56,10 @@ def _get_filter_config( raise ImportError("shtns is requested but not available.") return _filter_shtns_frame, kwargs - if resolved_engine == "shtools": - kwargs["backend"] = "shtools" - else: # ducc0 - kwargs["backend"] = "ducc" - kwargs["nthreads"] = nthreads + # ducc0 + kwargs["nthreads"] = nthreads - return _filter_pyshtools_frame, kwargs + return _filter_ducc0_frame, kwargs # Thread-local storage to ensure each Dask thread has its own SHTns object. @@ -79,13 +76,13 @@ def _get_shtns_plan(nlat: int, nlon: int, lmax: int) -> shtns.sht: key = (nlat, nlon, lmax) if key not in _thread_local.cache: - # Sampling theorem limit for regular grid with poles: nlat > 2*lmax - grid_lmax = (nlat - 1) // 2 - actual_lmax = min(lmax, grid_lmax) - mmax = min(actual_lmax, nlon // 2 - 1) + # SHTns can handle lmax beyond sampling theorem for analysis, + # but results might be aliased if not careful. + # NCL/Spherepack T42 uses lmax=42 for 73x144. + mmax = min(lmax, nlon // 2 - 1) - # Use 4pi normalization to match pyshtools default and Hodges TRACK. 
- sh = shtns.sht(actual_lmax, mmax, norm=shtns.sht_fourpi) + # Use 4pi normalization to match standard SHTOOLS and Hodges TRACK. + sh = shtns.sht(lmax, mmax, norm=shtns.sht_fourpi) # shtns.sht_reg_poles is the standard equidistant latitude grid including poles. # SHT_PHI_CONTIGUOUS matches standard NumPy/C row-major layout (nlat, nlon). sh.set_grid(nlat, nlon, shtns.sht_reg_poles | shtns.SHT_PHI_CONTIGUOUS) @@ -131,56 +128,52 @@ def _filter_shtns_frame( return out -def _filter_pyshtools_frame( +def _filter_ducc0_frame( frame: NDArray[np.float64], lmin: int, lmax: int, lat_reverse: bool = False, - backend: Literal["shtools", "ducc"] = "ducc", nthreads: int = 1, ) -> NDArray[np.float64]: - """Filters a single 2D frame using pyshtools.""" - import pyshtools as pysh # type: ignore[import-untyped] + """Filters a single 2D frame using ducc0.""" + import ducc0 # type: ignore[import-not-found] if lat_reverse: frame = frame[::-1, :] nlat, nlon = frame.shape - - # pyshtools DH grid requires nlon = 2*nlat-1 (extended) or nlon = 2*nlat. - # Typical ERA5 2.5x2.5 is 73x144. nlon=144, nlat=73. 2*nlat-1 = 145. 
- if nlon == 2 * nlat - 2: - # Pad longitude by repeating first column at the end to get 2*nlat-1 - frame_proc = np.empty((nlat, nlon + 1), dtype=frame.dtype) - frame_proc[:, :-1] = frame - frame_proc[:, -1] = frame[:, 0] - slice_back = True - elif nlon == 2 * nlat - 1 or nlon == 2 * nlat: - frame_proc = frame - slice_back = False - else: - # Try to use as is, let pyshtools raise if incompatible - frame_proc = frame - slice_back = False + + # Regular equidistant grid + geometry = "CC" + mmax = min(lmax, nlon // 2 - 1) try: - grid = pysh.SHGrid.from_array(frame_proc, grid="DH") - coeffs = grid.expand(backend=backend, nthreads=nthreads) + alm = ducc0.sht.analysis_2d( + map=np.expand_dims(frame, axis=0), + spin=0, + lmax=lmax, + mmax=mmax, + geometry=geometry, + nthreads=nthreads, + ) - # Filter operations - coeffs_filtered = coeffs.copy() + # Apply Bandpass Mask + l_arr = np.concatenate([np.arange(m, lmax + 1) for m in range(mmax + 1)]) if lmin > 0: - for l_val in range(min(lmin, coeffs_filtered.lmax + 1)): - coeffs_filtered.coeffs[:, l_val, :] = 0.0 - if lmax < coeffs_filtered.lmax: - for l_val in range(lmax + 1, coeffs_filtered.lmax + 1): - coeffs_filtered.coeffs[:, l_val, :] = 0.0 - - filtered_grid = coeffs_filtered.expand(backend=backend, nthreads=nthreads) - out = cast(NDArray[np.float64], filtered_grid.to_array()) + mask = l_arr < lmin + alm[0, mask] = 0.0 + + out = ducc0.sht.synthesis_2d( + alm=alm, + spin=0, + lmax=lmax, + mmax=mmax, + ntheta=nlat, + nphi=nlon, + geometry=geometry, + nthreads=nthreads, + )[0] - if slice_back: - out = out[:, :-1] if lat_reverse: out = out[::-1, :] @@ -193,7 +186,7 @@ def _filter_pyshtools_frame( class SpectralFilter: """ Spectral bandpass filter (truncation) for lat-lon grid data. - Backends (in order of preference): shtns, ducc0, shtools. + Backends (in order of preference): shtns, ducc0. 
""" def __init__( @@ -201,7 +194,7 @@ def __init__( lmin: int = 5, lmax: int = 42, lat_reverse: bool = False, - sht_engine: Literal["auto", "shtns", "ducc0", "shtools"] = "auto", + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", ) -> None: """ Initialize the filter with wave number bounds. @@ -210,7 +203,7 @@ def __init__( lmin (int): Minimum total wave number to retain. lmax (int): Maximum total wave number to retain. lat_reverse (bool): If True, assume latitude is stored from South to North. - sht_engine (str): Engine to use ('auto', 'shtns', 'ducc0', 'shtools'). + sht_engine (str): Engine to use ('auto', 'shtns', 'ducc0'). """ self.lmin = lmin self.lmax = lmax @@ -278,7 +271,7 @@ def apply_spectral_filter( lmax: int = 42, lat_reverse: bool = False, backend: Literal["serial", "mpi", "dask"] = "serial", - sht_engine: Literal["auto", "shtns", "ducc0", "shtools"] = "auto", + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", ) -> xr.DataArray: """ Applies a spectral bandpass filter to the input DataArray. @@ -289,7 +282,7 @@ def apply_spectral_filter( lmax (int): Maximum total wave number to retain. Defaults to 42. lat_reverse (bool): If True, assume latitude is South to North. backend (str): Parallelization backend. Options: 'serial', 'mpi', 'dask'. - sht_engine (str): Engine. Options: 'auto', 'shtns', 'ducc0', 'shtools'. + sht_engine (str): Engine. Options: 'auto', 'shtns', 'ducc0'. Returns: xr.DataArray: The filtered data. 
diff --git a/src/pystormtracker/simple/concurrent.py b/src/pystormtracker/simple/concurrent.py index a809ea0f..bf162a97 100644 --- a/src/pystormtracker/simple/concurrent.py +++ b/src/pystormtracker/simple/concurrent.py @@ -26,6 +26,8 @@ def run_simple_dask( filter: bool = True, lmin: int = constants.LMIN_DEFAULT, lmax: int = constants.LMAX_DEFAULT, + taper_points: int = constants.TAPER_DEFAULT, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", **kwargs: float | int | str | None, ) -> Tracks: import dask @@ -42,7 +44,11 @@ def run_simple_dask( from .tracker import SimpleTracker data_xr = SimpleTracker().preprocess_standard_track( - data_xr, lmin=lmin, lmax=lmax + data_xr, + lmin=lmin, + lmax=lmax, + taper_points=taper_points, + sht_engine=sht_engine, ) detector_obj = SimpleDetector.from_xarray(data_xr) @@ -103,6 +109,8 @@ def run_simple_mpi( filter: bool = True, lmin: int = constants.LMIN_DEFAULT, lmax: int = constants.LMAX_DEFAULT, + taper_points: int = constants.TAPER_DEFAULT, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", **kwargs: float | int | str | None, ) -> Tracks: from mpi4py import MPI @@ -123,7 +131,11 @@ def run_simple_mpi( from .tracker import SimpleTracker data_xr = SimpleTracker().preprocess_standard_track( - data_xr, lmin=lmin, lmax=lmax + data_xr, + lmin=lmin, + lmax=lmax, + taper_points=taper_points, + sht_engine=sht_engine, ) detector_obj = SimpleDetector.from_xarray(data_xr) diff --git a/src/pystormtracker/simple/tracker.py b/src/pystormtracker/simple/tracker.py index 04a5e05d..334815b0 100644 --- a/src/pystormtracker/simple/tracker.py +++ b/src/pystormtracker/simple/tracker.py @@ -50,6 +50,7 @@ def preprocess_standard_track( lmin: int = constants.LMIN_DEFAULT, lmax: int = constants.LMAX_DEFAULT, taper_points: int = constants.TAPER_DEFAULT, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", ) -> xr.DataArray: """ Applies standard spectral preprocessing. 
@@ -69,7 +70,7 @@ def preprocess_standard_track( data = cast(xr.DataArray, taper.filter(data)) # 2. Spectral Filtering - spectral_filter = SpectralFilter(lmin=lmin, lmax=lmax) + spectral_filter = SpectralFilter(lmin=lmin, lmax=lmax, sht_engine=sht_engine) data = spectral_filter.filter(data) return data @@ -85,6 +86,8 @@ def _detect_serial( filter: bool = False, lmin: int = constants.LMIN_DEFAULT, lmax: int = constants.LMAX_DEFAULT, + taper_points: int = constants.TAPER_DEFAULT, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", **kwargs: float | int | str | None, ) -> Tracks: import timeit @@ -96,7 +99,13 @@ def _detect_serial( data_xr = detector_peek.get_xarray() if filter: - data_xr = self.preprocess_standard_track(data_xr, lmin=lmin, lmax=lmax) + data_xr = self.preprocess_standard_track( + data_xr, + lmin=lmin, + lmax=lmax, + taper_points=taper_points, + sht_engine=sht_engine, + ) t_pre = timeit.default_timer() print(f" [Serial] Preprocessing time: {t_pre - t0:.4f}s") @@ -133,6 +142,7 @@ def track( lmin: int = constants.LMIN_DEFAULT, lmax: int = constants.LMAX_DEFAULT, taper_points: int = constants.TAPER_DEFAULT, + sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", **kwargs: float | int | str | None, ) -> Tracks: import timeit @@ -164,6 +174,8 @@ def track( filter=filter, lmin=lmin, lmax=lmax, + taper_points=taper_points, + sht_engine=sht_engine, **kwargs, ) elif backend == "dask": @@ -181,6 +193,8 @@ def track( filter=filter, lmin=lmin, lmax=lmax, + taper_points=taper_points, + sht_engine=sht_engine, **kwargs, ) else: @@ -194,6 +208,8 @@ def track( filter=filter, lmin=lmin, lmax=lmax, + taper_points=taper_points, + sht_engine=sht_engine, **kwargs, ) diff --git a/tests/test_cli.py b/tests/test_cli.py index 97ad8e81..502a9ca9 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -60,6 +60,8 @@ def test_parse_args() -> None: "serial", "-w", "4", + "--sht-engine", + "ducc0", ] with patch("sys.argv", test_args): args = parse_args() @@ -71,6 +73,7 @@ 
def test_parse_args() -> None: assert args.mode == "max" assert args.backend == "serial" assert args.workers == 4 + assert args.sht_engine == "ducc0" def test_main(msl_data: str, tmp_path: Path) -> None: diff --git a/tests/test_integration_spectral.py b/tests/test_integration_spectral.py index 4a14f688..4fc38ca7 100644 --- a/tests/test_integration_spectral.py +++ b/tests/test_integration_spectral.py @@ -57,10 +57,9 @@ def test_spectral_filter_era5_parity_integration(case: FilterTestCase) -> None: msl = ds_msl.msl ref = ds_ref.msl - engines: list[Literal["shtns", "ducc0", "shtools"]] = [ + engines: list[Literal["shtns", "ducc0"]] = [ "shtns", "ducc0", - "shtools", ] for engine in engines: diff --git a/tests/test_integration_vorticity.py b/tests/test_integration_vorticity.py index 8d7e7217..8dcecb88 100644 --- a/tests/test_integration_vorticity.py +++ b/tests/test_integration_vorticity.py @@ -40,7 +40,7 @@ def test_vorticity_divergence_parity_integration() -> None: div, vort = apply_vort_div(u, v, sht_engine=engine) if engine == "ducc0": - # ducc0/pyshtools should be bit-wise identical to NCL Spherepack + # ducc0 should be bit-wise identical to NCL Spherepack (on the same grid) np.testing.assert_allclose( vort.values, vo_ref.values, rtol=1e-12, atol=1e-12 ) @@ -68,19 +68,7 @@ def test_vorticity_internal_consistency() -> None: _, vort_ducc = apply_vort_div(u, v, sht_engine="ducc0") - try: - from importlib.util import find_spec - - if find_spec("pyshtools") is not None: - _, vort_shtools = apply_vort_div(u, v, sht_engine="shtools") - - rmse_shtools = np.sqrt( - np.mean((vort_ducc.values - vort_shtools.values) ** 2) - ) - assert rmse_shtools < 1e-10 # Should be nearly identical - except ImportError: - pass - + # Comparing ducc0 with shtns. 
try: import shtns # type: ignore[import-untyped] diff --git a/uv.lock b/uv.lock index 84217507..ffc541f5 100644 --- a/uv.lock +++ b/uv.lock @@ -27,15 +27,15 @@ wheels = [ [[package]] name = "anyio" -version = "4.12.1" +version = "4.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, ] [[package]] @@ -103,38 +103,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205", size = 68797, upload-time = "2025-10-18T17:46:45.663Z" }, ] -[[package]] -name = "astropy" -version = "7.2.0" -source = { registry = "https://pypi.org/simple" 
} -dependencies = [ - { name = "astropy-iers-data" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "pyerfa" }, - { name = "pyyaml" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7b/92/2dce2d48347efc3346d08ca7995b152d242ebd170c571f7c9346468d8427/astropy-7.2.0.tar.gz", hash = "sha256:ae48bc26b1feaeb603cd94bd1fa1aa39137a115fe931b7f13787ab420e8c3070", size = 7057774, upload-time = "2025-11-25T22:36:41.916Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/6d/6330a844bad8dfc4875e0f2fa1db1fee87837ba9805aa8a8d048c071363a/astropy-7.2.0-cp311-abi3-macosx_10_9_x86_64.whl", hash = "sha256:efac04df4cc488efe630c2fff1992d6516dfb16a06e197fb68bc9e8e3b85def1", size = 6442332, upload-time = "2025-11-25T22:36:23.6Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ba/3418133ba144dfcd1530bca5a6b695f4cdd21a8abaaa2ac4e5450d11b028/astropy-7.2.0-cp311-abi3-macosx_11_0_arm64.whl", hash = "sha256:52e9a7d9c86b21f1af911a2930cd0c4a275fb302d455c89e11eedaffef6f2ad0", size = 6413656, upload-time = "2025-11-25T22:36:26.548Z" }, - { url = "https://files.pythonhosted.org/packages/be/ba/05e43b5a7d738316a097fa78524d3eaaff5986294b4a052d4adb3c45e7c0/astropy-7.2.0-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97c370421b9bb13d4c762c7af06d172bad7c01bd5bcf88314f6913c3c235b770", size = 9758867, upload-time = "2025-11-25T22:36:28.661Z" }, - { url = "https://files.pythonhosted.org/packages/c3/1c/f06ad85180e7dd9855aa5ede901bfc2be858d7bee17d4e978a14c0ecec14/astropy-7.2.0-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f39ce2c80211fbceb005d377a5478cd0d66c42aa1498d252f2239fe5a025c24", size = 9789007, upload-time = "2025-11-25T22:36:31.063Z" }, - { url = "https://files.pythonhosted.org/packages/f8/fb/e4d35194a5009d7a73333079481a4ef1380a255d67b9c1db578151a5fb50/astropy-7.2.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:ad4d71db994d45f046a1a5449000cf0f88ab6367cb67658500654a0586d6ab19", size = 9748547, upload-time = "2025-11-25T22:36:33.154Z" }, - { url = "https://files.pythonhosted.org/packages/36/ea/f990730978ae0a7a34705f885d2f3806928c5f0bc22eefd6a1a23539cc32/astropy-7.2.0-cp311-abi3-win32.whl", hash = "sha256:95161f26602433176483e8bde8ab1a8ca09148f5b4bf5190569a26d381091598", size = 6237228, upload-time = "2025-11-25T22:36:35.236Z" }, - { url = "https://files.pythonhosted.org/packages/ec/bc/f4378f586dd63902c37d16f68f35f7d555b3b32e08ac6b1d633eb0a48805/astropy-7.2.0-cp311-abi3-win_amd64.whl", hash = "sha256:dc7c340ba1713e55c93071b32033f3153470a0f663a4d539c03a7c9b44020790", size = 6362868, upload-time = "2025-11-25T22:36:37.784Z" }, - { url = "https://files.pythonhosted.org/packages/77/79/b6d4bf01913cfd4ce0cd4c1be5916beccdb92b2970bab8c827984231eae6/astropy-7.2.0-cp311-abi3-win_arm64.whl", hash = "sha256:0c428735a3f15b05c2095bc6ccb5f98a64bc99fb7015866af19ff8492420ddaf", size = 6221756, upload-time = "2025-11-25T22:36:39.852Z" }, -] - -[[package]] -name = "astropy-iers-data" -version = "0.2026.3.23.0.51.38" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/78/e6/76dcf596ddedd0cc3971cc54e00580c4cf092982332ba137e397fc811b71/astropy_iers_data-0.2026.3.23.0.51.38.tar.gz", hash = "sha256:6f94246080a0ea757e3e10dc2c62467bb5b14b9b83abbb6069b0c491ac22ca68", size = 1928481, upload-time = "2026-03-23T00:52:22.323Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/b8/7e6c709267796804ee6cb25e58dd97d45f6f67d0c4dcbe710ef6991bfdae/astropy_iers_data-0.2026.3.23.0.51.38-py3-none-any.whl", hash = "sha256:a1a7b94db4e354a529695fcb8e5ed59f8212fdb31fdf3a9a07bcbc09bf6a3484", size = 1985387, upload-time = "2026-03-23T00:52:20.885Z" }, -] - [[package]] name = "asttokens" version = "3.0.1" @@ -201,20 +169,6 @@ css = [ { name = "tinycss2" }, ] -[[package]] -name = "boule" -version = "0.5.0" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "numpy" }, - { name = "scipy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6f/cb/ba840a6f38c045310ac223edc799a69842a959f549aeea3fee0644472e83/boule-0.5.0.tar.gz", hash = "sha256:fde5908b8a88f0f05ccbf23f2aa422b9d81a2a3e6ed6c1ce65afda0bb6254717", size = 33514, upload-time = "2024-10-22T21:35:43.393Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/a1/fff65b40476cbdd2626aca3f744347921aca9656638277302679c2a608d2/boule-0.5.0-py3-none-any.whl", hash = "sha256:374129df477a4551db9cdfae7fb1578f9d3f1978f826f5d60bd0eab98ba31f15", size = 37180, upload-time = "2024-10-22T21:35:42.026Z" }, -] - [[package]] name = "cartopy" version = "0.25.0" @@ -830,7 +784,7 @@ name = "eccodeslib" version = "2.46.1.15" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "eckitlib", marker = "sys_platform != 'win32'" }, + { name = "eckitlib" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/dc/49/0a6856515155a8a2321f1df178e57ac11d55a799fd0f57c16c6b26cd648e/eccodeslib-2.46.1.15-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:0f50171a76b0d6f1ac45344a06a42eb80c51fa903eda4a3caf4ecaad9878bb91", size = 8891149, upload-time = "2026-03-20T15:35:34.581Z" }, @@ -1097,7 +1051,7 @@ name = "importlib-metadata" version = "9.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "zipp", marker = "python_full_version < '3.12'" }, + { name = "zipp" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a9/01/15bb152d77b21318514a96f43af312635eb2500c96b55398d020c93d86ea/importlib_metadata-9.0.0.tar.gz", hash = "sha256:a4f57ab599e6a2e3016d7595cfd72eb4661a5106e787a95bcc90c7105b831efc", size = 56405, upload-time = "2026-03-20T06:42:56.999Z" } wheels = [ @@ -1291,11 +1245,11 @@ wheels = [ [[package]] name = "jsonpointer" -version = "3.1.0" +version = "3.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/48/bf/9ecc036fbc15cf4153ea6ed4dbeed31ef043f762cccc9d44a534be8319b0/jsonpointer-3.1.0.tar.gz", hash = "sha256:f9b39abd59ba8c1de8a4ff16141605d2a8dacc4dd6cf399672cf237bfe47c211", size = 9000, upload-time = "2026-03-20T21:47:09.982Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/c7/af399a2e7a67fd18d63c40c5e62d3af4e67b836a2107468b6a5ea24c4304/jsonpointer-3.1.1.tar.gz", hash = "sha256:0b801c7db33a904024f6004d526dcc53bbb8a4a0f4e32bfd10beadf60adf1900", size = 9068, upload-time = "2026-03-23T22:32:32.458Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/25/cebb241a435cbf4626b5ea096d8385c04416d7ca3082a15299b746e248fa/jsonpointer-3.1.0-py3-none-any.whl", hash = "sha256:f82aa0f745001f169b96473348370b43c3f581446889c41c807bab1db11c8e7b", size = 7651, upload-time = "2026-03-20T21:47:08.792Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6a/a83720e953b1682d2d109d3c2dbb0bc9bf28cc1cbc205be4ef4be5da709d/jsonpointer-3.1.1-py3-none-any.whl", hash = "sha256:8ff8b95779d071ba472cf5bc913028df06031797532f08a7d5b602d8b2a488ca", size = 7659, upload-time = "2026-03-23T22:32:31.568Z" }, ] [[package]] @@ -2565,24 +2519,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] -[[package]] -name = "pyerfa" -version = "2.0.1.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/39/63cc8291b0cf324ae710df41527faf7d331bce573899199d926b3e492260/pyerfa-2.0.1.5.tar.gz", hash = "sha256:17d6b24fe4846c65d5e7d8c362dcb08199dc63b30a236aedd73875cc83e1f6c0", size = 818430, upload-time = "2024-11-11T15:22:30.852Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/7d/d9/3448a57cb5bd19950de6d6ab08bd8fbb3df60baa71726de91d73d76c481b/pyerfa-2.0.1.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b282d7c60c4c47cf629c484c17ac504fcb04abd7b3f4dfcf53ee042afc3a5944", size = 341818, upload-time = "2024-11-11T15:22:16.467Z" }, - { url = "https://files.pythonhosted.org/packages/11/4a/31a363370478b63c6289a34743f2ba2d3ae1bd8223e004d18ab28fb92385/pyerfa-2.0.1.5-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:be1aeb70390dd03a34faf96749d5cabc58437410b4aab7213c512323932427df", size = 329370, upload-time = "2024-11-11T15:22:17.829Z" }, - { url = "https://files.pythonhosted.org/packages/cb/96/b6210fc624123c8ae13e1eecb68fb75e3f3adff216d95eee1c7b05843e3e/pyerfa-2.0.1.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0603e8e1b839327d586c8a627cdc634b795e18b007d84f0cda5500a0908254e", size = 692794, upload-time = "2024-11-11T15:22:19.429Z" }, - { url = "https://files.pythonhosted.org/packages/e5/e0/050018d855d26d3c0b4a7d1b2ed692be758ce276d8289e2a2b44ba1014a5/pyerfa-2.0.1.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e43c7194e3242083f2350b46c09fd4bf8ba1bcc0ebd1460b98fc47fe2389906", size = 738711, upload-time = "2024-11-11T15:22:20.661Z" }, - { url = "https://files.pythonhosted.org/packages/b9/f5/ff91ee77308793ae32fa1e1de95e9edd4551456dd888b4e87c5938657ca5/pyerfa-2.0.1.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:07b80cd70701f5d066b1ac8cce406682cfcd667a1186ec7d7ade597239a6021d", size = 722966, upload-time = "2024-11-11T15:22:21.905Z" }, - { url = "https://files.pythonhosted.org/packages/2c/56/b22b35c8551d2228ff8d445e63787112927ca13f6dc9e2c04f69d742c95b/pyerfa-2.0.1.5-cp39-abi3-win32.whl", hash = "sha256:d30b9b0df588ed5467e529d851ea324a67239096dd44703125072fd11b351ea2", size = 339955, upload-time = "2024-11-11T15:22:23.087Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/11/97233cf23ad5411ac6f13b1d6ee3888f90ace4f974d9bf9db887aa428912/pyerfa-2.0.1.5-cp39-abi3-win_amd64.whl", hash = "sha256:66292d437dcf75925b694977aa06eb697126e7b86553e620371ed3e48b5e0ad0", size = 349410, upload-time = "2024-11-11T15:22:24.817Z" }, -] - [[package]] name = "pygments" version = "2.19.2" @@ -2675,59 +2611,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/82/06/cad54e8ce758bd836ee5411691cbd49efeb9cc611b374670fce299519334/pyshp-3.0.3-py3-none-any.whl", hash = "sha256:28c8fac8c0c25bb0fecbbfd10ead7f319c2ff2f3b0b44a94f22bd2c93510ad42", size = 58465, upload-time = "2025-11-28T17:47:30.328Z" }, ] -[[package]] -name = "pyshtools" -version = "4.14.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "astropy" }, - { name = "boule" }, - { name = "matplotlib" }, - { name = "numpy" }, - { name = "pooch" }, - { name = "requests" }, - { name = "scipy" }, - { name = "tqdm" }, - { name = "xarray" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/89/dc/6a8441467b70be206136f223c4640e4b78fc2df3f0018c470f8710d3ae9a/pyshtools-4.14.1.tar.gz", hash = "sha256:40a20f1292cafb29b095139abd07c0cbf2da09b91b6573512e86054503914677", size = 41584798, upload-time = "2026-02-23T07:23:31.167Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/e1/4c922d60edebf703cbc64cab9960e92e0cefafd9b3eec6077bb047ef9d2e/pyshtools-4.14.1-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:eebdf162266e15f042e9b509ea410b4cab15c96a127e62a87d3c47dcbc73e69d", size = 2104325, upload-time = "2026-02-23T07:22:52.651Z" }, - { url = "https://files.pythonhosted.org/packages/9f/f7/17f2e3a05a60acfb6b9e1f59f166f598a0802c238dbd9076404fbca0ec5e/pyshtools-4.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c24c04202b10abeeba77128e22feb7128f12d3afb946a085d0087e3054fb80b8", size = 13911794, upload-time = "2026-02-23T07:22:54.327Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/00/b3b1a011c290b4ff337897ab3cc1ff0b58ed5924b0357f0ebbb1b592d07e/pyshtools-4.14.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6d895594113f0bfdeba989f3f40be35b61640bd4a02b1c99439778efda834902", size = 6414723, upload-time = "2026-02-23T07:22:56.458Z" }, - { url = "https://files.pythonhosted.org/packages/33/89/a96d809768b37b3b487a9d72d037107545d08133b6617917e2713fd85a6b/pyshtools-4.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:df166dad89ad3e15b3b1d3479a483724e1c4c89747a7e40387733cc899fd8034", size = 19228217, upload-time = "2026-02-23T07:22:58.1Z" }, - { url = "https://files.pythonhosted.org/packages/f0/2e/778dbbcb85987c1ffd388017fae6a3935e4ee275e07ea5f90cfee73247e6/pyshtools-4.14.1-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:b2c19eb5bba2d9644874a0a5816f7e17d69973041b8e54937f2626d4c8ecaf35", size = 2104600, upload-time = "2026-02-23T07:23:00.404Z" }, - { url = "https://files.pythonhosted.org/packages/94/01/b0a6081d7495838a22aa81c1f332884b2b566f0fa5d3cda6dfce6067ba82/pyshtools-4.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bee396aad9c88402a86c522139ce8029fb58cd88d5490906eb4bd77103d9cda2", size = 13911889, upload-time = "2026-02-23T07:23:01.969Z" }, - { url = "https://files.pythonhosted.org/packages/d8/94/6230ed95a35abc2b18512d20c6351f68975058ef9869f8ccc0a6845206dc/pyshtools-4.14.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:be39c5a4f281c54a0c07d81eaa4f5faa538a2533290cb10ba9daaa7f3cd6750d", size = 6414911, upload-time = "2026-02-23T07:23:04.165Z" }, - { url = "https://files.pythonhosted.org/packages/56/a9/3f6ffe9d94ac1ec57da1aa4b8818e3022093fb29b57b73fa351ed415a5a4/pyshtools-4.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:f9cc2221c0c4364363cf96d145b2a37a0328db8f790bb0ab2f07707c395f1df5", size = 19228378, upload-time = "2026-02-23T07:23:05.793Z" }, - { url = 
"https://files.pythonhosted.org/packages/72/af/fef43ce3d1a95b7409fa4536c9ecb7ccff62beda2368b714650b781fcb38/pyshtools-4.14.1-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:05b6fc8a05c7d6a467b3a425695742f3e5e6e450ace53ab54b91652785d262fd", size = 2104576, upload-time = "2026-02-23T07:23:07.861Z" }, - { url = "https://files.pythonhosted.org/packages/2c/e1/6cd2c152cb1dcf75f9d677e6b818f00e174d791554cf387d4fa7de8c21f4/pyshtools-4.14.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c4fe28eb15ec45440a70e30059a3e82486357f41851ef9c8fac85d6aa573f02", size = 13911899, upload-time = "2026-02-23T07:23:09.472Z" }, - { url = "https://files.pythonhosted.org/packages/fd/c3/2318cdf5417c8c356d9ce77c78bda5cdb1a68eb3b84a6ae6c4be80c197ce/pyshtools-4.14.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:7a1f62f58c7373a70896002dba1894497b6b487a9c3586768c2014c39ba29deb", size = 6414857, upload-time = "2026-02-23T07:23:11.54Z" }, - { url = "https://files.pythonhosted.org/packages/e2/7e/bfe556e00cb3c38584822d177b2fab0f6f0c6cf9b2f2273ad6af7a0dabc5/pyshtools-4.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:268a891950b6d9899260ecf4ad5a9c58d1beab21a09115b27c37e132b762756c", size = 19228366, upload-time = "2026-02-23T07:23:13.067Z" }, - { url = "https://files.pythonhosted.org/packages/3d/f6/4f1d99441c6e1eb6c27919e56985f0c6197991fa1db801954737460d5311/pyshtools-4.14.1-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:809ebc263e49b0c4bd146b1b82cd39deab5a4e83aad3b6cedcfffeaba77c17fb", size = 2104578, upload-time = "2026-02-23T07:23:15.109Z" }, - { url = "https://files.pythonhosted.org/packages/0a/22/a501c14c7ca8c7142fd77ab8a81473b0d24a51ef9801cd153cda1a8144e1/pyshtools-4.14.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df588cda4f533cba6658c47b9d18baf2c2f5729ca3c2a6de441949d601c6a28c", size = 13911857, upload-time = "2026-02-23T07:23:16.537Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/d8/77f54fb0bd6ec45af4cbebaf90848bd3cc19131422c16001bebc35856df4/pyshtools-4.14.1-cp314-cp314-win_amd64.whl", hash = "sha256:fbcfc82a25e6eaf1e475b2ef1ae51072a1c56d5b5aa22176791e00d1f301cc21", size = 19589341, upload-time = "2026-02-23T07:23:18.756Z" }, - { url = "https://files.pythonhosted.org/packages/c3/33/229b8816662587fd0fe9f3b254815bc50bc3971918c98b50ae31bafbc1b1/pyshtools-4.14.1-cp314-cp314t-macosx_15_0_arm64.whl", hash = "sha256:06f83d1e6b3268722d5ac7226d9b8b98ec2ca8a73d5f5be7ca2e67083155336a", size = 2106383, upload-time = "2026-02-23T07:23:21.312Z" }, - { url = "https://files.pythonhosted.org/packages/f1/03/be83f28bfcd1ebcff8fcbdc924d59c956462a26e95c5a3b3aa219c844f5c/pyshtools-4.14.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:114b74c4c31ca5a48f8882fcf8cce0e2f19c640298614ce25fe12ee860b56cee", size = 13914757, upload-time = "2026-02-23T07:23:23.041Z" }, - { url = "https://files.pythonhosted.org/packages/ca/8b/e343e3d5875fb231942d4cbf8cb7e6b6db59990ecec89281252bedafd425/pyshtools-4.14.1-cp314-cp314t-win_amd64.whl", hash = "sha256:053096acb6465e61136f290483735593492c0bcd18f6254740e56b565fc174f0", size = 19592543, upload-time = "2026-02-23T07:23:25.378Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ed/6e9ff799d3334999a0dfd640b5fad55fc350eda2493560f425e8710b8806/pyshtools-4.14.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4d2d5b6b125c31058ed23050d8987ca2eb175902a776aa3acffc005ffc8d95be", size = 6412232, upload-time = "2026-02-23T07:23:29.269Z" }, -] - -[package.optional-dependencies] -ducc = [ - { name = "ducc0" }, -] - [[package]] name = "pystormtracker" version = "0.5.0.dev0" source = { editable = "." 
} dependencies = [ { name = "dask" }, + { name = "ducc0" }, { name = "h5netcdf", extra = ["h5py"] }, { name = "numba" }, { name = "numpy" }, - { name = "pyshtools", extra = ["ducc"] }, { name = "xarray" }, ] @@ -2785,6 +2678,7 @@ requires-dist = [ { name = "cfgrib", marker = "extra == 'all'", specifier = ">=0.9.15.1" }, { name = "cfgrib", marker = "extra == 'grib'", specifier = ">=0.9.15.1" }, { name = "dask", specifier = ">=2024.1.0" }, + { name = "ducc0", specifier = ">=0.33.0" }, { name = "eccodes", marker = "extra == 'all'", specifier = ">=2.43.0" }, { name = "eccodes", marker = "extra == 'grib'", specifier = ">=2.43.0" }, { name = "eccodeslib", marker = "sys_platform != 'win32' and extra == 'all'", specifier = ">=2.43.0" }, @@ -2801,7 +2695,6 @@ requires-dist = [ { name = "netcdf4", marker = "extra == 'netcdf4'", specifier = ">=1.6.1" }, { name = "numba", specifier = ">=0.60.0" }, { name = "numpy", specifier = ">=1.24.0" }, - { name = "pyshtools", extras = ["ducc"], specifier = ">=4.10.4" }, { name = "shtns", marker = "platform_machine != 'aarch64' and platform_machine != 'arm64' and extra == 'all'", specifier = ">=3.7.3" }, { name = "shtns", marker = "platform_machine != 'aarch64' and platform_machine != 'arm64' and extra == 'shtns'", specifier = ">=3.7.3" }, { name = "sphinx", marker = "extra == 'docs'", specifier = ">=9.0.4" }, @@ -3209,77 +3102,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8f/e8/726643a3ea68c727da31570bde48c7a10f1aa60eddd628d94078fec586ff/ruff-0.15.7-py3-none-win_arm64.whl", hash = "sha256:18e8d73f1c3fdf27931497972250340f92e8c861722161a9caeb89a58ead6ed2", size = 11023304, upload-time = "2026-03-19T16:26:51.669Z" }, ] -[[package]] -name = "scipy" -version = "1.17.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7a/97/5a3609c4f8d58b039179648e62dd220f89864f56f7357f5d4f45c29eb2cc/scipy-1.17.1.tar.gz", hash = 
"sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0", size = 30573822, upload-time = "2026-02-23T00:26:24.851Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/df/75/b4ce781849931fef6fd529afa6b63711d5a733065722d0c3e2724af9e40a/scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec", size = 31613675, upload-time = "2026-02-23T00:16:00.13Z" }, - { url = "https://files.pythonhosted.org/packages/f7/58/bccc2861b305abdd1b8663d6130c0b3d7cc22e8d86663edbc8401bfd40d4/scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696", size = 28162057, upload-time = "2026-02-23T00:16:09.456Z" }, - { url = "https://files.pythonhosted.org/packages/6d/ee/18146b7757ed4976276b9c9819108adbc73c5aad636e5353e20746b73069/scipy-1.17.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a3472cfbca0a54177d0faa68f697d8ba4c80bbdc19908c3465556d9f7efce9ee", size = 20334032, upload-time = "2026-02-23T00:16:17.358Z" }, - { url = "https://files.pythonhosted.org/packages/ec/e6/cef1cf3557f0c54954198554a10016b6a03b2ec9e22a4e1df734936bd99c/scipy-1.17.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:766e0dc5a616d026a3a1cffa379af959671729083882f50307e18175797b3dfd", size = 22709533, upload-time = "2026-02-23T00:16:25.791Z" }, - { url = "https://files.pythonhosted.org/packages/4d/60/8804678875fc59362b0fb759ab3ecce1f09c10a735680318ac30da8cd76b/scipy-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744b2bf3640d907b79f3fd7874efe432d1cf171ee721243e350f55234b4cec4c", size = 33062057, upload-time = "2026-02-23T00:16:36.931Z" }, - { url = "https://files.pythonhosted.org/packages/09/7d/af933f0f6e0767995b4e2d705a0665e454d1c19402aa7e895de3951ebb04/scipy-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:43af8d1f3bea642559019edfe64e9b11192a8978efbd1539d7bc2aaa23d92de4", size = 35349300, upload-time = "2026-02-23T00:16:49.108Z" }, - { url = "https://files.pythonhosted.org/packages/b4/3d/7ccbbdcbb54c8fdc20d3b6930137c782a163fa626f0aef920349873421ba/scipy-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd96a1898c0a47be4520327e01f874acfd61fb48a9420f8aa9f6483412ffa444", size = 35127333, upload-time = "2026-02-23T00:17:01.293Z" }, - { url = "https://files.pythonhosted.org/packages/e8/19/f926cb11c42b15ba08e3a71e376d816ac08614f769b4f47e06c3580c836a/scipy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4eb6c25dd62ee8d5edf68a8e1c171dd71c292fdae95d8aeb3dd7d7de4c364082", size = 37741314, upload-time = "2026-02-23T00:17:12.576Z" }, - { url = "https://files.pythonhosted.org/packages/95/da/0d1df507cf574b3f224ccc3d45244c9a1d732c81dcb26b1e8a766ae271a8/scipy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:d30e57c72013c2a4fe441c2fcb8e77b14e152ad48b5464858e07e2ad9fbfceff", size = 36607512, upload-time = "2026-02-23T00:17:23.424Z" }, - { url = "https://files.pythonhosted.org/packages/68/7f/bdd79ceaad24b671543ffe0ef61ed8e659440eb683b66f033454dcee90eb/scipy-1.17.1-cp311-cp311-win_arm64.whl", hash = "sha256:9ecb4efb1cd6e8c4afea0daa91a87fbddbce1b99d2895d151596716c0b2e859d", size = 24599248, upload-time = "2026-02-23T00:17:34.561Z" }, - { url = "https://files.pythonhosted.org/packages/35/48/b992b488d6f299dbe3f11a20b24d3dda3d46f1a635ede1c46b5b17a7b163/scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8", size = 31610954, upload-time = "2026-02-23T00:17:49.855Z" }, - { url = "https://files.pythonhosted.org/packages/b2/02/cf107b01494c19dc100f1d0b7ac3cc08666e96ba2d64db7626066cee895e/scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76", size = 28172662, upload-time = "2026-02-23T00:18:01.64Z" }, - { url = 
"https://files.pythonhosted.org/packages/cf/a9/599c28631bad314d219cf9ffd40e985b24d603fc8a2f4ccc5ae8419a535b/scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086", size = 20344366, upload-time = "2026-02-23T00:18:12.015Z" }, - { url = "https://files.pythonhosted.org/packages/35/f5/906eda513271c8deb5af284e5ef0206d17a96239af79f9fa0aebfe0e36b4/scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b", size = 22704017, upload-time = "2026-02-23T00:18:21.502Z" }, - { url = "https://files.pythonhosted.org/packages/da/34/16f10e3042d2f1d6b66e0428308ab52224b6a23049cb2f5c1756f713815f/scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21", size = 32927842, upload-time = "2026-02-23T00:18:35.367Z" }, - { url = "https://files.pythonhosted.org/packages/01/8e/1e35281b8ab6d5d72ebe9911edcdffa3f36b04ed9d51dec6dd140396e220/scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458", size = 35235890, upload-time = "2026-02-23T00:18:49.188Z" }, - { url = "https://files.pythonhosted.org/packages/c5/5c/9d7f4c88bea6e0d5a4f1bc0506a53a00e9fcb198de372bfe4d3652cef482/scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb", size = 35003557, upload-time = "2026-02-23T00:18:54.74Z" }, - { url = "https://files.pythonhosted.org/packages/65/94/7698add8f276dbab7a9de9fb6b0e02fc13ee61d51c7c3f85ac28b65e1239/scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea", size = 37625856, upload-time = "2026-02-23T00:19:00.307Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/84/dc08d77fbf3d87d3ee27f6a0c6dcce1de5829a64f2eae85a0ecc1f0daa73/scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87", size = 36549682, upload-time = "2026-02-23T00:19:07.67Z" }, - { url = "https://files.pythonhosted.org/packages/bc/98/fe9ae9ffb3b54b62559f52dedaebe204b408db8109a8c66fdd04869e6424/scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3", size = 24547340, upload-time = "2026-02-23T00:19:12.024Z" }, - { url = "https://files.pythonhosted.org/packages/76/27/07ee1b57b65e92645f219b37148a7e7928b82e2b5dbeccecb4dff7c64f0b/scipy-1.17.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5e3c5c011904115f88a39308379c17f91546f77c1667cea98739fe0fccea804c", size = 31590199, upload-time = "2026-02-23T00:19:17.192Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ae/db19f8ab842e9b724bf5dbb7db29302a91f1e55bc4d04b1025d6d605a2c5/scipy-1.17.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6fac755ca3d2c3edcb22f479fceaa241704111414831ddd3bc6056e18516892f", size = 28154001, upload-time = "2026-02-23T00:19:22.241Z" }, - { url = "https://files.pythonhosted.org/packages/5b/58/3ce96251560107b381cbd6e8413c483bbb1228a6b919fa8652b0d4090e7f/scipy-1.17.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:7ff200bf9d24f2e4d5dc6ee8c3ac64d739d3a89e2326ba68aaf6c4a2b838fd7d", size = 20325719, upload-time = "2026-02-23T00:19:26.329Z" }, - { url = "https://files.pythonhosted.org/packages/b2/83/15087d945e0e4d48ce2377498abf5ad171ae013232ae31d06f336e64c999/scipy-1.17.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4b400bdc6f79fa02a4d86640310dde87a21fba0c979efff5248908c6f15fad1b", size = 22683595, upload-time = "2026-02-23T00:19:30.304Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/e0/e58fbde4a1a594c8be8114eb4aac1a55bcd6587047efc18a61eb1f5c0d30/scipy-1.17.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b64ca7d4aee0102a97f3ba22124052b4bd2152522355073580bf4845e2550b6", size = 32896429, upload-time = "2026-02-23T00:19:35.536Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5f/f17563f28ff03c7b6799c50d01d5d856a1d55f2676f537ca8d28c7f627cd/scipy-1.17.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:581b2264fc0aa555f3f435a5944da7504ea3a065d7029ad60e7c3d1ae09c5464", size = 35203952, upload-time = "2026-02-23T00:19:42.259Z" }, - { url = "https://files.pythonhosted.org/packages/8d/a5/9afd17de24f657fdfe4df9a3f1ea049b39aef7c06000c13db1530d81ccca/scipy-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:beeda3d4ae615106d7094f7e7cef6218392e4465cc95d25f900bebabfded0950", size = 34979063, upload-time = "2026-02-23T00:19:47.547Z" }, - { url = "https://files.pythonhosted.org/packages/8b/13/88b1d2384b424bf7c924f2038c1c409f8d88bb2a8d49d097861dd64a57b2/scipy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6609bc224e9568f65064cfa72edc0f24ee6655b47575954ec6339534b2798369", size = 37598449, upload-time = "2026-02-23T00:19:53.238Z" }, - { url = "https://files.pythonhosted.org/packages/35/e5/d6d0e51fc888f692a35134336866341c08655d92614f492c6860dc45bb2c/scipy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:37425bc9175607b0268f493d79a292c39f9d001a357bebb6b88fdfaff13f6448", size = 36510943, upload-time = "2026-02-23T00:20:50.89Z" }, - { url = "https://files.pythonhosted.org/packages/2a/fd/3be73c564e2a01e690e19cc618811540ba5354c67c8680dce3281123fb79/scipy-1.17.1-cp313-cp313-win_arm64.whl", hash = "sha256:5cf36e801231b6a2059bf354720274b7558746f3b1a4efb43fcf557ccd484a87", size = 24545621, upload-time = "2026-02-23T00:20:55.871Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/6b/17787db8b8114933a66f9dcc479a8272e4b4da75fe03b0c282f7b0ade8cd/scipy-1.17.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:d59c30000a16d8edc7e64152e30220bfbd724c9bbb08368c054e24c651314f0a", size = 31936708, upload-time = "2026-02-23T00:19:58.694Z" }, - { url = "https://files.pythonhosted.org/packages/38/2e/524405c2b6392765ab1e2b722a41d5da33dc5c7b7278184a8ad29b6cb206/scipy-1.17.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:010f4333c96c9bb1a4516269e33cb5917b08ef2166d5556ca2fd9f082a9e6ea0", size = 28570135, upload-time = "2026-02-23T00:20:03.934Z" }, - { url = "https://files.pythonhosted.org/packages/fd/c3/5bd7199f4ea8556c0c8e39f04ccb014ac37d1468e6cfa6a95c6b3562b76e/scipy-1.17.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2ceb2d3e01c5f1d83c4189737a42d9cb2fc38a6eeed225e7515eef71ad301dce", size = 20741977, upload-time = "2026-02-23T00:20:07.935Z" }, - { url = "https://files.pythonhosted.org/packages/d9/b8/8ccd9b766ad14c78386599708eb745f6b44f08400a5fd0ade7cf89b6fc93/scipy-1.17.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:844e165636711ef41f80b4103ed234181646b98a53c8f05da12ca5ca289134f6", size = 23029601, upload-time = "2026-02-23T00:20:12.161Z" }, - { url = "https://files.pythonhosted.org/packages/6d/a0/3cb6f4d2fb3e17428ad2880333cac878909ad1a89f678527b5328b93c1d4/scipy-1.17.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:158dd96d2207e21c966063e1635b1063cd7787b627b6f07305315dd73d9c679e", size = 33019667, upload-time = "2026-02-23T00:20:17.208Z" }, - { url = "https://files.pythonhosted.org/packages/f3/c3/2d834a5ac7bf3a0c806ad1508efc02dda3c8c61472a56132d7894c312dea/scipy-1.17.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74cbb80d93260fe2ffa334efa24cb8f2f0f622a9b9febf8b483c0b865bfb3475", size = 35264159, upload-time = "2026-02-23T00:20:23.087Z" }, - { url = 
"https://files.pythonhosted.org/packages/4d/77/d3ed4becfdbd217c52062fafe35a72388d1bd82c2d0ba5ca19d6fcc93e11/scipy-1.17.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dbc12c9f3d185f5c737d801da555fb74b3dcfa1a50b66a1a93e09190f41fab50", size = 35102771, upload-time = "2026-02-23T00:20:28.636Z" }, - { url = "https://files.pythonhosted.org/packages/bd/12/d19da97efde68ca1ee5538bb261d5d2c062f0c055575128f11a2730e3ac1/scipy-1.17.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94055a11dfebe37c656e70317e1996dc197e1a15bbcc351bcdd4610e128fe1ca", size = 37665910, upload-time = "2026-02-23T00:20:34.743Z" }, - { url = "https://files.pythonhosted.org/packages/06/1c/1172a88d507a4baaf72c5a09bb6c018fe2ae0ab622e5830b703a46cc9e44/scipy-1.17.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e30bdeaa5deed6bc27b4cc490823cd0347d7dae09119b8803ae576ea0ce52e4c", size = 36562980, upload-time = "2026-02-23T00:20:40.575Z" }, - { url = "https://files.pythonhosted.org/packages/70/b0/eb757336e5a76dfa7911f63252e3b7d1de00935d7705cf772db5b45ec238/scipy-1.17.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a720477885a9d2411f94a93d16f9d89bad0f28ca23c3f8daa521e2dcc3f44d49", size = 24856543, upload-time = "2026-02-23T00:20:45.313Z" }, - { url = "https://files.pythonhosted.org/packages/cf/83/333afb452af6f0fd70414dc04f898647ee1423979ce02efa75c3b0f2c28e/scipy-1.17.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:a48a72c77a310327f6a3a920092fa2b8fd03d7deaa60f093038f22d98e096717", size = 31584510, upload-time = "2026-02-23T00:21:01.015Z" }, - { url = "https://files.pythonhosted.org/packages/ed/a6/d05a85fd51daeb2e4ea71d102f15b34fedca8e931af02594193ae4fd25f7/scipy-1.17.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:45abad819184f07240d8a696117a7aacd39787af9e0b719d00285549ed19a1e9", size = 28170131, upload-time = "2026-02-23T00:21:05.888Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/7b/8624a203326675d7746a254083a187398090a179335b2e4a20e2ddc46e83/scipy-1.17.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3fd1fcdab3ea951b610dc4cef356d416d5802991e7e32b5254828d342f7b7e0b", size = 20342032, upload-time = "2026-02-23T00:21:09.904Z" }, - { url = "https://files.pythonhosted.org/packages/c9/35/2c342897c00775d688d8ff3987aced3426858fd89d5a0e26e020b660b301/scipy-1.17.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7bdf2da170b67fdf10bca777614b1c7d96ae3ca5794fd9587dce41eb2966e866", size = 22678766, upload-time = "2026-02-23T00:21:14.313Z" }, - { url = "https://files.pythonhosted.org/packages/ef/f2/7cdb8eb308a1a6ae1e19f945913c82c23c0c442a462a46480ce487fdc0ac/scipy-1.17.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:adb2642e060a6549c343603a3851ba76ef0b74cc8c079a9a58121c7ec9fe2350", size = 32957007, upload-time = "2026-02-23T00:21:19.663Z" }, - { url = "https://files.pythonhosted.org/packages/0b/2e/7eea398450457ecb54e18e9d10110993fa65561c4f3add5e8eccd2b9cd41/scipy-1.17.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee2cfda04c00a857206a4330f0c5e3e56535494e30ca445eb19ec624ae75118", size = 35221333, upload-time = "2026-02-23T00:21:25.278Z" }, - { url = "https://files.pythonhosted.org/packages/d9/77/5b8509d03b77f093a0d52e606d3c4f79e8b06d1d38c441dacb1e26cacf46/scipy-1.17.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d2650c1fb97e184d12d8ba010493ee7b322864f7d3d00d3f9bb97d9c21de4068", size = 35042066, upload-time = "2026-02-23T00:21:31.358Z" }, - { url = "https://files.pythonhosted.org/packages/f9/df/18f80fb99df40b4070328d5ae5c596f2f00fffb50167e31439e932f29e7d/scipy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:08b900519463543aa604a06bec02461558a6e1cef8fdbb8098f77a48a83c8118", size = 37612763, upload-time = "2026-02-23T00:21:37.247Z" }, - { url = 
"https://files.pythonhosted.org/packages/4b/39/f0e8ea762a764a9dc52aa7dabcfad51a354819de1f0d4652b6a1122424d6/scipy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:3877ac408e14da24a6196de0ddcace62092bfc12a83823e92e49e40747e52c19", size = 37290984, upload-time = "2026-02-23T00:22:35.023Z" }, - { url = "https://files.pythonhosted.org/packages/7c/56/fe201e3b0f93d1a8bcf75d3379affd228a63d7e2d80ab45467a74b494947/scipy-1.17.1-cp314-cp314-win_arm64.whl", hash = "sha256:f8885db0bc2bffa59d5c1b72fad7a6a92d3e80e7257f967dd81abb553a90d293", size = 25192877, upload-time = "2026-02-23T00:22:39.798Z" }, - { url = "https://files.pythonhosted.org/packages/96/ad/f8c414e121f82e02d76f310f16db9899c4fcde36710329502a6b2a3c0392/scipy-1.17.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:1cc682cea2ae55524432f3cdff9e9a3be743d52a7443d0cba9017c23c87ae2f6", size = 31949750, upload-time = "2026-02-23T00:21:42.289Z" }, - { url = "https://files.pythonhosted.org/packages/7c/b0/c741e8865d61b67c81e255f4f0a832846c064e426636cd7de84e74d209be/scipy-1.17.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:2040ad4d1795a0ae89bfc7e8429677f365d45aa9fd5e4587cf1ea737f927b4a1", size = 28585858, upload-time = "2026-02-23T00:21:47.706Z" }, - { url = "https://files.pythonhosted.org/packages/ed/1b/3985219c6177866628fa7c2595bfd23f193ceebbe472c98a08824b9466ff/scipy-1.17.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:131f5aaea57602008f9822e2115029b55d4b5f7c070287699fe45c661d051e39", size = 20757723, upload-time = "2026-02-23T00:21:52.039Z" }, - { url = "https://files.pythonhosted.org/packages/c0/19/2a04aa25050d656d6f7b9e7b685cc83d6957fb101665bfd9369ca6534563/scipy-1.17.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9cdc1a2fcfd5c52cfb3045feb399f7b3ce822abdde3a193a6b9a60b3cb5854ca", size = 23043098, upload-time = "2026-02-23T00:21:56.185Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/f1/3383beb9b5d0dbddd030335bf8a8b32d4317185efe495374f134d8be6cce/scipy-1.17.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e3dcd57ab780c741fde8dc68619de988b966db759a3c3152e8e9142c26295ad", size = 33030397, upload-time = "2026-02-23T00:22:01.404Z" }, - { url = "https://files.pythonhosted.org/packages/41/68/8f21e8a65a5a03f25a79165ec9d2b28c00e66dc80546cf5eb803aeeff35b/scipy-1.17.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9956e4d4f4a301ebf6cde39850333a6b6110799d470dbbb1e25326ac447f52a", size = 35281163, upload-time = "2026-02-23T00:22:07.024Z" }, - { url = "https://files.pythonhosted.org/packages/84/8d/c8a5e19479554007a5632ed7529e665c315ae7492b4f946b0deb39870e39/scipy-1.17.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a4328d245944d09fd639771de275701ccadf5f781ba0ff092ad141e017eccda4", size = 35116291, upload-time = "2026-02-23T00:22:12.585Z" }, - { url = "https://files.pythonhosted.org/packages/52/52/e57eceff0e342a1f50e274264ed47497b59e6a4e3118808ee58ddda7b74a/scipy-1.17.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a77cbd07b940d326d39a1d1b37817e2ee4d79cb30e7338f3d0cddffae70fcaa2", size = 37682317, upload-time = "2026-02-23T00:22:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/11/2f/b29eafe4a3fbc3d6de9662b36e028d5f039e72d345e05c250e121a230dd4/scipy-1.17.1-cp314-cp314t-win_amd64.whl", hash = "sha256:eb092099205ef62cd1782b006658db09e2fed75bffcae7cc0d44052d8aa0f484", size = 37345327, upload-time = "2026-02-23T00:22:24.442Z" }, - { url = "https://files.pythonhosted.org/packages/07/39/338d9219c4e87f3e708f18857ecd24d22a0c3094752393319553096b98af/scipy-1.17.1-cp314-cp314t-win_arm64.whl", hash = "sha256:200e1050faffacc162be6a486a984a0497866ec54149a01270adc8a59b7c7d21", size = 25489165, upload-time = "2026-02-23T00:22:29.563Z" }, -] - [[package]] name = "send2trash" version = "2.1.0" @@ -3667,18 +3489,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/b7/c8/876602cbc96469911f0939f703453c1157b0c826ecb05bdd32e023397d4e/tornado-6.5.5-cp39-abi3-win_arm64.whl", hash = "sha256:2c9a876e094109333f888539ddb2de4361743e5d21eece20688e3e351e4990a6", size = 448016, upload-time = "2026-03-10T21:31:00.43Z" }, ] -[[package]] -name = "tqdm" -version = "4.67.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, -] - [[package]] name = "traitlets" version = "5.14.3" From f0170aa2dc0bdcb097588dd97ff72c82ba2de937 Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Tue, 24 Mar 2026 22:57:36 -0400 Subject: [PATCH 2/9] fix: resolve linting and mypy issues in spectral preprocessing --- src/pystormtracker/preprocessing/spectral.py | 25 +++++++++++--------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/src/pystormtracker/preprocessing/spectral.py b/src/pystormtracker/preprocessing/spectral.py index 9591a66f..fa8da337 100644 --- a/src/pystormtracker/preprocessing/spectral.py +++ b/src/pystormtracker/preprocessing/spectral.py @@ -142,7 +142,7 @@ def _filter_ducc0_frame( frame = frame[::-1, :] nlat, nlon = frame.shape - + # Regular equidistant grid geometry = "CC" mmax = min(lmax, nlon // 2 - 1) @@ -163,16 +163,19 @@ def _filter_ducc0_frame( mask = l_arr < lmin alm[0, mask] = 0.0 - out = 
ducc0.sht.synthesis_2d( - alm=alm, - spin=0, - lmax=lmax, - mmax=mmax, - ntheta=nlat, - nphi=nlon, - geometry=geometry, - nthreads=nthreads, - )[0] + out = cast( + NDArray[np.float64], + ducc0.sht.synthesis_2d( + alm=alm, + spin=0, + lmax=lmax, + mmax=mmax, + ntheta=nlat, + nphi=nlon, + geometry=geometry, + nthreads=nthreads, + )[0], + ) if lat_reverse: out = out[::-1, :] From b62392bdc41d59167d2d5b66985c222ca7c8270f Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Tue, 24 Mar 2026 23:06:01 -0400 Subject: [PATCH 3/9] test: tighten spectral parity thresholds and add benchmark script --- benchmark/calculate_spectral_errors.py | 78 ++++++++++++++++++++++++++ tests/test_integration_spectral.py | 9 +-- 2 files changed, 83 insertions(+), 4 deletions(-) create mode 100644 benchmark/calculate_spectral_errors.py diff --git a/benchmark/calculate_spectral_errors.py b/benchmark/calculate_spectral_errors.py new file mode 100644 index 00000000..7ba50449 --- /dev/null +++ b/benchmark/calculate_spectral_errors.py @@ -0,0 +1,78 @@ + +import os +import numpy as np +import xarray as xr +from pystormtracker.preprocessing import SpectralFilter + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +MSL_FILE = os.path.join(BASE_DIR, "data/test/era5/era5_msl_2025120100_2.5x2.5.nc") + +TEST_CASES = [ + { + "name": "T5-42", + "lmin": 5, + "lmax": 42, + "ref": os.path.join(BASE_DIR, "data/test/era5/era5_msl_2025120100_2.5x2.5_t5-42_ncl.nc"), + }, + { + "name": "T0-42", + "lmin": 0, + "lmax": 42, + "ref": os.path.join(BASE_DIR, "data/test/era5/era5_msl_2025120100_2.5x2.5_t0-42_ncl.nc"), + }, +] + +def calculate_stats(filtered, reference): + f = filtered.values.flatten().astype(np.float64) + r = reference.values.flatten().astype(np.float64) + + diff = f - r + rmse = np.sqrt(np.mean(diff**2)) + abs_err = np.mean(np.abs(diff)) + + # Relative error (using mean of absolute values of reference) + rel_err = rmse / np.mean(np.abs(r)) + + corr = 
np.corrcoef(f, r)[0, 1] + + return { + "rmse": rmse, + "abs_err": abs_err, + "rel_err": rel_err, + "corr": corr + } + +def main(): + if not os.path.exists(MSL_FILE): + print(f"Base MSL file not found: {MSL_FILE}") + return + + ds_msl = xr.open_dataset(MSL_FILE) + msl = ds_msl.msl + + engines = ["shtns", "ducc0"] + + header = f"{'Case':<10} {'Engine':<10} {'RMSE':<18} {'Abs Err':<18} {'Rel Err':<18} {'Corr':<18}" + print(header) + print("-" * len(header)) + + for case in TEST_CASES: + if not os.path.exists(case["ref"]): + print(f"Reference file not found for {case['name']}: {case['ref']}") + continue + + ds_ref = xr.open_dataset(case["ref"]) + ref = ds_ref.msl + + for engine in engines: + try: + filt = SpectralFilter(lmin=case["lmin"], lmax=case["lmax"], sht_engine=engine) + filtered = filt.filter(msl) + + stats = calculate_stats(filtered, ref) + print(f"{case['name']:<10} {engine:<10} {stats['rmse']:<18.8f} {stats['abs_err']:<18.8f} {stats['rel_err']:<18.8e} {stats['corr']:<18.12e}") + except Exception as e: + print(f"{case['name']:<10} {engine:<10} Error: {e}") + +if __name__ == "__main__": + main() diff --git a/tests/test_integration_spectral.py b/tests/test_integration_spectral.py index 4fc38ca7..e0889cea 100644 --- a/tests/test_integration_spectral.py +++ b/tests/test_integration_spectral.py @@ -69,16 +69,17 @@ def test_spectral_filter_era5_parity_integration(case: FilterTestCase) -> None: filt = SpectralFilter(lmin=case["lmin"], lmax=case["lmax"], sht_engine=engine) filtered = filt.filter(msl) - # Ensure high structural correlation (> 0.99) + # Ensure extremely high structural correlation (> 0.9999) corr = np.corrcoef(filtered.values.flatten(), ref.values.flatten())[0, 1] - assert corr > 0.99, ( + assert corr > 0.9999, ( f"Low correlation for {engine} (T{case['lmin']}-{case['lmax']}): {corr}" ) # Ensure RMSE is within acceptable bounds for large-scale field (MSL ~10^5) - # Note: RMSE is slightly higher for T0-42 as it includes low frequencies + # shtns: ~0.46 
Pa RMSE vs NCL (legacy Spherepack) + # ducc0: ~0.05 Pa RMSE vs NCL (modern implementation consistency) rmse = np.sqrt(np.mean((filtered.values - ref.values) ** 2)) - max_rmse = 100.0 + max_rmse = 1.0 if engine == "shtns" else 0.1 assert rmse < max_rmse, ( f"High RMSE for {engine} (T{case['lmin']}-{case['lmax']}): {rmse}" ) From a118bccdc84e07c27f1c7cc4aeaeff14ecb6c681 Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Tue, 24 Mar 2026 23:26:29 -0400 Subject: [PATCH 4/9] refactor: remove shtns from kinematics and tighten vodv test thresholds --- benchmark/calculate_spectral_errors.py | 4 +- benchmark/calculate_vodv_errors.py | 66 +++++++++++++++++ .../preprocessing/kinematics.py | 71 +++---------------- tests/test_integration_vorticity.py | 44 +----------- tests/test_kinematics.py | 2 +- 5 files changed, 79 insertions(+), 108 deletions(-) create mode 100644 benchmark/calculate_vodv_errors.py diff --git a/benchmark/calculate_spectral_errors.py b/benchmark/calculate_spectral_errors.py index 7ba50449..8f29ecea 100644 --- a/benchmark/calculate_spectral_errors.py +++ b/benchmark/calculate_spectral_errors.py @@ -4,7 +4,7 @@ import xarray as xr from pystormtracker.preprocessing import SpectralFilter -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) MSL_FILE = os.path.join(BASE_DIR, "data/test/era5/era5_msl_2025120100_2.5x2.5.nc") TEST_CASES = [ @@ -70,7 +70,7 @@ def main(): filtered = filt.filter(msl) stats = calculate_stats(filtered, ref) - print(f"{case['name']:<10} {engine:<10} {stats['rmse']:<18.8f} {stats['abs_err']:<18.8f} {stats['rel_err']:<18.8e} {stats['corr']:<18.12e}") + print(f"{case['name']:<10} {engine:<10} {stats['rmse']:<18.8f} {stats['abs_err']:<18.8f} {stats['rel_err']:<18.12f} {stats['corr']:<18.12f}") except Exception as e: print(f"{case['name']:<10} {engine:<10} Error: {e}") diff --git a/benchmark/calculate_vodv_errors.py 
b/benchmark/calculate_vodv_errors.py new file mode 100644 index 00000000..6421f1d5 --- /dev/null +++ b/benchmark/calculate_vodv_errors.py @@ -0,0 +1,66 @@ +import os +import numpy as np +import xarray as xr +from pystormtracker.preprocessing.kinematics import apply_vort_div + +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +WIND_FILE = os.path.join(BASE_DIR, "data/test/era5/era5_uv850_2025120100_2.5x2.5.nc") +VODIV_FILE = os.path.join(BASE_DIR, "data/test/era5/era5_vodv850_2025120100_2.5x2.5_ncl.nc") + +def calculate_stats(calc, ref): + c = calc.values.flatten().astype(np.float64) + r = ref.values.flatten().astype(np.float64) + + diff = c - r + rmse = np.sqrt(np.mean(diff**2)) + abs_err = np.mean(np.abs(diff)) + + # Relative error (using mean of absolute values of reference) + rel_err = rmse / np.mean(np.abs(r)) + + corr = np.corrcoef(c, r)[0, 1] + + return { + "rmse": rmse, + "abs_err": abs_err, + "rel_err": rel_err, + "corr": corr + } + +def main(): + if not os.path.exists(WIND_FILE) or not os.path.exists(VODIV_FILE): + print(f"Test data not found.") + return + + ds_uv = xr.open_dataset(WIND_FILE) + ds_ref = xr.open_dataset(VODIV_FILE) + + u, v = ds_uv.u, ds_uv.v + vo_ref, dv_ref = ds_ref.vo, ds_ref.dv + + engines = ["ducc0"] + + header = f"{'Variable':<10} {'Engine':<10} {'RMSE':<18} {'Abs Err':<18} {'Rel Err':<18} {'Corr':<18}" + print(header) + print("-" * len(header)) + + for engine in engines: + try: + div, vort = apply_vort_div(u, v, sht_engine=engine) + + # Vorticity + vo_stats = calculate_stats(vort, vo_ref) + if engine == engines[0]: + print(f"Ref Mean Abs (Vorticity): {np.mean(np.abs(vo_ref.values)):.8e}") + print(f"{'Vorticity':<10} {engine:<10} {vo_stats['rmse']:<18.8f} {vo_stats['abs_err']:<18.8f} {vo_stats['rel_err']:<18.12f} {vo_stats['corr']:<18.12f}") + + # Divergence + dv_stats = calculate_stats(div, dv_ref) + if engine == engines[0]: + print(f"Ref Mean Abs (Divergence): {np.mean(np.abs(dv_ref.values)):.8e}") + 
print(f"{'Diverg.':<10} {engine:<10} {dv_stats['rmse']:<18.8f} {dv_stats['abs_err']:<18.8f} {dv_stats['rel_err']:<18.12f} {dv_stats['corr']:<18.12f}") + except Exception as e: + print(f"Engine {engine} Error: {e}") + +if __name__ == "__main__": + main() diff --git a/src/pystormtracker/preprocessing/kinematics.py b/src/pystormtracker/preprocessing/kinematics.py index 5cdaa983..dd76fbf0 100644 --- a/src/pystormtracker/preprocessing/kinematics.py +++ b/src/pystormtracker/preprocessing/kinematics.py @@ -1,6 +1,5 @@ from __future__ import annotations -import threading from typing import Literal, TypedDict, overload import numpy as np @@ -19,11 +18,11 @@ class KinematicsKwargs(TypedDict, total=False): def _resolve_engine( - sht_engine: Literal["auto", "shtns", "ducc0"], -) -> Literal["shtns", "ducc0"]: + sht_engine: Literal["auto", "ducc0"], +) -> Literal["ducc0"]: """Resolves 'auto' engine to the best available backend.""" if sht_engine == "auto": - return "shtns" if SHTNS_AVAILABLE else "ducc0" + return "ducc0" return sht_engine @@ -34,30 +33,6 @@ def _resolve_engine( except ImportError: DUCC0_AVAILABLE = False -try: - import shtns # type: ignore[import-untyped] - - SHTNS_AVAILABLE = True -except ImportError: - SHTNS_AVAILABLE = False - -_thread_local = threading.local() - - -def _get_shtns_plan(nlat: int, nlon: int, lmax: int) -> shtns.sht: - if not hasattr(_thread_local, "cache"): - _thread_local.cache = {} - key = (nlat, nlon, lmax) - if key not in _thread_local.cache: - # SHTns can handle lmax beyond sampling theorem for analysis, - # but results might be aliased if not careful. - # NCL/Spherepack T42 uses lmax=42 for 73x144. 
- mmax = min(lmax, nlon // 2 - 1) - sh = shtns.sht(lmax, mmax, norm=shtns.sht_fourpi) - sh.set_grid(nlat, nlon, shtns.sht_reg_poles | shtns.SHT_PHI_CONTIGUOUS) - _thread_local.cache[key] = sh - return _thread_local.cache[key] - def compute_vort_div( u: NDArray[np.float64], @@ -66,7 +41,7 @@ def compute_vort_div( lmax: int | None = None, geometry: str = "CC", nthreads: int = 0, - sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", + sht_engine: Literal["auto", "ducc0"] = "auto", ) -> tuple[NDArray[np.float64], NDArray[np.float64]]: """ Computes spatial divergence and relative vorticity from u and v wind components. @@ -78,7 +53,7 @@ def compute_vort_div( lmax: Maximum spherical harmonic degree. If None, derived from ntheta. geometry: Grid geometry (for ducc0). Default 'CC'. nthreads: Number of threads (for ducc0). - sht_engine: Transform engine ('auto', 'shtns', 'ducc0'). + sht_engine: Transform engine ('auto', 'ducc0'). Returns: div: Divergence (ntheta, nphi) @@ -99,35 +74,7 @@ def compute_vort_div( # Resolve engine resolved_engine = _resolve_engine(sht_engine) - if resolved_engine == "shtns": - if not SHTNS_AVAILABLE: - raise ImportError("shtns is requested but not available.") - sh = _get_shtns_plan(ntheta, nphi, lmax) - # SHTns: spat_to_SHsphtor expects (v_theta, v_phi) - # v_theta = v, v_phi = u (points North and East) - v_theta = np.ascontiguousarray(v, dtype=np.float64) - v_phi = np.ascontiguousarray(u, dtype=np.float64) - # Returns S (divergence-like) and T (vorticity-like) coeffs - slm = np.zeros(sh.nlm, dtype=np.complex128) - tlm = np.zeros(sh.nlm, dtype=np.complex128) - sh.spat_to_SHsphtor(v_theta, v_phi, slm, tlm) - - # Scale to get div and vort coefficients - # SHTns returns coefficients s, t such that: - # V = sum (s Ylm_grad + t Ylm_curl) - # In meteorology: - # div = -l(l+1)/R * s - # vort = -l(l+1)/R * t - l_arr = sh.l - eigen = (l_arr * (l_arr + 1.0)) / R - div_lm = -slm * eigen - vort_lm = -tlm * eigen - - div = sh.synth(div_lm) - vort = 
sh.synth(vort_lm) - return div, vort - - elif resolved_engine == "ducc0": + if resolved_engine == "ducc0": if not DUCC0_AVAILABLE: raise ImportError("ducc0 is requested but not available.") @@ -195,7 +142,7 @@ def apply_vort_div( lmax: int | None = None, geometry: str = "CC", nthreads: int = 0, - sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", + sht_engine: Literal["auto", "ducc0"] = "auto", backend: Literal["serial", "mpi", "dask"] = "serial", ) -> tuple[xr.DataArray, xr.DataArray]: """ @@ -259,7 +206,7 @@ def __init__( R: float = R_EARTH_METERS, lmax: int | None = None, geometry: str = "CC", - sht_engine: Literal["auto", "shtns", "ducc0"] = "auto", + sht_engine: Literal["auto", "ducc0"] = "auto", ) -> None: """ Initialize the kinematics calculator. @@ -268,7 +215,7 @@ def __init__( R: Planetary radius in meters. lmax: Maximum spherical harmonic degree. geometry: Grid geometry ('CC', 'DH', etc.). - sht_engine: Transform engine ('auto', 'shtns', 'ducc0', 'shtools'). + sht_engine: Transform engine ('auto', 'ducc0'). """ self.R = R self.lmax = lmax diff --git a/tests/test_integration_vorticity.py b/tests/test_integration_vorticity.py index 8dcecb88..60d5197a 100644 --- a/tests/test_integration_vorticity.py +++ b/tests/test_integration_vorticity.py @@ -31,12 +31,9 @@ def test_vorticity_divergence_parity_integration() -> None: u, v = ds_uv.u, ds_uv.v vo_ref, dv_ref = ds_ref.vo, ds_ref.dv - engines: list[Literal["shtns", "ducc0"]] = ["shtns", "ducc0"] + engines: list[Literal["ducc0"]] = ["ducc0"] for engine in engines: - if engine == "shtns": - pytest.importorskip("shtns") - div, vort = apply_vort_div(u, v, sht_engine=engine) if engine == "ducc0": @@ -47,42 +44,3 @@ def test_vorticity_divergence_parity_integration() -> None: np.testing.assert_allclose( div.values, dv_ref.values, rtol=1e-12, atol=1e-12 ) - else: - # shtns uses different grid weights for regular grids, loose match - # but same order of magnitude and general pattern. 
- rmse_vo = np.sqrt(np.mean((vort.values - vo_ref.values) ** 2)) - assert rmse_vo < 5e-6 # Tightened from 1e-4, but allows for Fejer weights - - # Ensure high structural correlation with NCL reference - corr = np.corrcoef(vort.values.flatten(), vo_ref.values.flatten())[0, 1] - assert corr > 0.99 - - -@pytest.mark.skipif(not HAS_DATA, reason="Local integration test data not found.") -def test_vorticity_internal_consistency() -> None: - """ - Verifies that different backends are consistent with each other. - """ - ds_uv = xr.open_dataset(WIND_FILE) - u, v = ds_uv.u, ds_uv.v - - _, vort_ducc = apply_vort_div(u, v, sht_engine="ducc0") - - # Comparing ducc0 with shtns. - try: - import shtns # type: ignore[import-untyped] - - _ = shtns.sht(31, 31) # Test import - _, vort_shtns = apply_vort_div(u, v, sht_engine="shtns") - - # Ensure ducc0 and shtns are highly correlated - corr = np.corrcoef(vort_ducc.values.flatten(), vort_shtns.values.flatten())[ - 0, 1 - ] - assert corr > 0.99 - - # Ensure they are in the same ballpark (RMSE < 5e-6) - rmse = np.sqrt(np.mean((vort_ducc.values - vort_shtns.values) ** 2)) - assert rmse < 5e-6 - except ImportError: - pytest.skip("shtns not available for consistency check") diff --git a/tests/test_kinematics.py b/tests/test_kinematics.py index 4e0ca6a5..18cdc8af 100644 --- a/tests/test_kinematics.py +++ b/tests/test_kinematics.py @@ -110,7 +110,7 @@ def test_solid_body_rotation() -> None: div, vort = compute_vort_div(u, v, nthreads=1) # Divergence of solid body rotation should be very close to zero - np.testing.assert_allclose(div, 0, atol=1e-10) + np.testing.assert_allclose(div, 0, atol=1e-12) # Vorticity is non-zero assert np.max(np.abs(vort)) > 0 From 33ccce1eb2f3a89f0b8eb449edd0e5e1dd517a6c Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Tue, 24 Mar 2026 23:33:57 -0400 Subject: [PATCH 5/9] refactor: unify spectral and kinematics, remove SHTOOLS mentions, and make ducc0 a mandatory dependency --- 
benchmark/calculate_vodv_errors.py | 66 ------------------- .../preprocessing/kinematics.py | 23 +------ src/pystormtracker/preprocessing/spectral.py | 5 +- 3 files changed, 4 insertions(+), 90 deletions(-) delete mode 100644 benchmark/calculate_vodv_errors.py diff --git a/benchmark/calculate_vodv_errors.py b/benchmark/calculate_vodv_errors.py deleted file mode 100644 index 6421f1d5..00000000 --- a/benchmark/calculate_vodv_errors.py +++ /dev/null @@ -1,66 +0,0 @@ -import os -import numpy as np -import xarray as xr -from pystormtracker.preprocessing.kinematics import apply_vort_div - -BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -WIND_FILE = os.path.join(BASE_DIR, "data/test/era5/era5_uv850_2025120100_2.5x2.5.nc") -VODIV_FILE = os.path.join(BASE_DIR, "data/test/era5/era5_vodv850_2025120100_2.5x2.5_ncl.nc") - -def calculate_stats(calc, ref): - c = calc.values.flatten().astype(np.float64) - r = ref.values.flatten().astype(np.float64) - - diff = c - r - rmse = np.sqrt(np.mean(diff**2)) - abs_err = np.mean(np.abs(diff)) - - # Relative error (using mean of absolute values of reference) - rel_err = rmse / np.mean(np.abs(r)) - - corr = np.corrcoef(c, r)[0, 1] - - return { - "rmse": rmse, - "abs_err": abs_err, - "rel_err": rel_err, - "corr": corr - } - -def main(): - if not os.path.exists(WIND_FILE) or not os.path.exists(VODIV_FILE): - print(f"Test data not found.") - return - - ds_uv = xr.open_dataset(WIND_FILE) - ds_ref = xr.open_dataset(VODIV_FILE) - - u, v = ds_uv.u, ds_uv.v - vo_ref, dv_ref = ds_ref.vo, ds_ref.dv - - engines = ["ducc0"] - - header = f"{'Variable':<10} {'Engine':<10} {'RMSE':<18} {'Abs Err':<18} {'Rel Err':<18} {'Corr':<18}" - print(header) - print("-" * len(header)) - - for engine in engines: - try: - div, vort = apply_vort_div(u, v, sht_engine=engine) - - # Vorticity - vo_stats = calculate_stats(vort, vo_ref) - if engine == engines[0]: - print(f"Ref Mean Abs (Vorticity): {np.mean(np.abs(vo_ref.values)):.8e}") - 
print(f"{'Vorticity':<10} {engine:<10} {vo_stats['rmse']:<18.8f} {vo_stats['abs_err']:<18.8f} {vo_stats['rel_err']:<18.12f} {vo_stats['corr']:<18.12f}") - - # Divergence - dv_stats = calculate_stats(div, dv_ref) - if engine == engines[0]: - print(f"Ref Mean Abs (Divergence): {np.mean(np.abs(dv_ref.values)):.8e}") - print(f"{'Diverg.':<10} {engine:<10} {dv_stats['rmse']:<18.8f} {dv_stats['abs_err']:<18.8f} {dv_stats['rel_err']:<18.12f} {dv_stats['corr']:<18.12f}") - except Exception as e: - print(f"Engine {engine} Error: {e}") - -if __name__ == "__main__": - main() diff --git a/src/pystormtracker/preprocessing/kinematics.py b/src/pystormtracker/preprocessing/kinematics.py index dd76fbf0..cfee8b2c 100644 --- a/src/pystormtracker/preprocessing/kinematics.py +++ b/src/pystormtracker/preprocessing/kinematics.py @@ -2,6 +2,7 @@ from typing import Literal, TypedDict, overload +import ducc0 # type: ignore[import-not-found] import numpy as np import xarray as xr from numpy.typing import NDArray @@ -17,23 +18,6 @@ class KinematicsKwargs(TypedDict, total=False): sht_engine: str -def _resolve_engine( - sht_engine: Literal["auto", "ducc0"], -) -> Literal["ducc0"]: - """Resolves 'auto' engine to the best available backend.""" - if sht_engine == "auto": - return "ducc0" - return sht_engine - - -try: - import ducc0 # type: ignore[import-not-found] - - DUCC0_AVAILABLE = True -except ImportError: - DUCC0_AVAILABLE = False - - def compute_vort_div( u: NDArray[np.float64], v: NDArray[np.float64], @@ -72,12 +56,9 @@ def compute_vort_div( lmax = ntheta - 1 # Resolve engine - resolved_engine = _resolve_engine(sht_engine) + resolved_engine = "ducc0" if sht_engine == "auto" else sht_engine if resolved_engine == "ducc0": - if not DUCC0_AVAILABLE: - raise ImportError("ducc0 is requested but not available.") - mmax = min(lmax, (nphi - 1) // 2) # Standard spectral derivation from wind components: diff --git a/src/pystormtracker/preprocessing/spectral.py 
b/src/pystormtracker/preprocessing/spectral.py index fa8da337..4af9e921 100644 --- a/src/pystormtracker/preprocessing/spectral.py +++ b/src/pystormtracker/preprocessing/spectral.py @@ -6,6 +6,7 @@ from collections.abc import Callable from typing import Literal, TypedDict, cast, overload +import ducc0 # type: ignore[import-not-found] import numpy as np import xarray as xr from numpy.typing import NDArray @@ -81,7 +82,7 @@ def _get_shtns_plan(nlat: int, nlon: int, lmax: int) -> shtns.sht: # NCL/Spherepack T42 uses lmax=42 for 73x144. mmax = min(lmax, nlon // 2 - 1) - # Use 4pi normalization to match standard SHTOOLS and Hodges TRACK. + # Use 4pi normalization to match NCL/Spherepack and Hodges TRACK. sh = shtns.sht(lmax, mmax, norm=shtns.sht_fourpi) # shtns.sht_reg_poles is the standard equidistant latitude grid including poles. # SHT_PHI_CONTIGUOUS matches standard NumPy/C row-major layout (nlat, nlon). @@ -136,8 +137,6 @@ def _filter_ducc0_frame( nthreads: int = 1, ) -> NDArray[np.float64]: """Filters a single 2D frame using ducc0.""" - import ducc0 # type: ignore[import-not-found] - if lat_reverse: frame = frame[::-1, :] From 8eaea79cc8b385d38114d52dc5d1ade455e32211 Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Tue, 24 Mar 2026 23:34:01 -0400 Subject: [PATCH 6/9] chore: remove remaining benchmark script --- benchmark/calculate_spectral_errors.py | 78 -------------------------- 1 file changed, 78 deletions(-) delete mode 100644 benchmark/calculate_spectral_errors.py diff --git a/benchmark/calculate_spectral_errors.py b/benchmark/calculate_spectral_errors.py deleted file mode 100644 index 8f29ecea..00000000 --- a/benchmark/calculate_spectral_errors.py +++ /dev/null @@ -1,78 +0,0 @@ - -import os -import numpy as np -import xarray as xr -from pystormtracker.preprocessing import SpectralFilter - -BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -MSL_FILE = os.path.join(BASE_DIR, 
"data/test/era5/era5_msl_2025120100_2.5x2.5.nc") - -TEST_CASES = [ - { - "name": "T5-42", - "lmin": 5, - "lmax": 42, - "ref": os.path.join(BASE_DIR, "data/test/era5/era5_msl_2025120100_2.5x2.5_t5-42_ncl.nc"), - }, - { - "name": "T0-42", - "lmin": 0, - "lmax": 42, - "ref": os.path.join(BASE_DIR, "data/test/era5/era5_msl_2025120100_2.5x2.5_t0-42_ncl.nc"), - }, -] - -def calculate_stats(filtered, reference): - f = filtered.values.flatten().astype(np.float64) - r = reference.values.flatten().astype(np.float64) - - diff = f - r - rmse = np.sqrt(np.mean(diff**2)) - abs_err = np.mean(np.abs(diff)) - - # Relative error (using mean of absolute values of reference) - rel_err = rmse / np.mean(np.abs(r)) - - corr = np.corrcoef(f, r)[0, 1] - - return { - "rmse": rmse, - "abs_err": abs_err, - "rel_err": rel_err, - "corr": corr - } - -def main(): - if not os.path.exists(MSL_FILE): - print(f"Base MSL file not found: {MSL_FILE}") - return - - ds_msl = xr.open_dataset(MSL_FILE) - msl = ds_msl.msl - - engines = ["shtns", "ducc0"] - - header = f"{'Case':<10} {'Engine':<10} {'RMSE':<18} {'Abs Err':<18} {'Rel Err':<18} {'Corr':<18}" - print(header) - print("-" * len(header)) - - for case in TEST_CASES: - if not os.path.exists(case["ref"]): - print(f"Reference file not found for {case['name']}: {case['ref']}") - continue - - ds_ref = xr.open_dataset(case["ref"]) - ref = ds_ref.msl - - for engine in engines: - try: - filt = SpectralFilter(lmin=case["lmin"], lmax=case["lmax"], sht_engine=engine) - filtered = filt.filter(msl) - - stats = calculate_stats(filtered, ref) - print(f"{case['name']:<10} {engine:<10} {stats['rmse']:<18.8f} {stats['abs_err']:<18.8f} {stats['rel_err']:<18.12f} {stats['corr']:<18.12f}") - except Exception as e: - print(f"{case['name']:<10} {engine:<10} Error: {e}") - -if __name__ == "__main__": - main() From e28784d559021d0ebc7b6b8498abcf3aa9a2ad34 Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Tue, 24 Mar 2026 23:40:42 -0400 
Subject: [PATCH 7/9] refactor: prefer ducc0 as default spectral engine to ensure consistency and silence SHTns warnings --- src/pystormtracker/preprocessing/spectral.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pystormtracker/preprocessing/spectral.py b/src/pystormtracker/preprocessing/spectral.py index 4af9e921..7116fbc9 100644 --- a/src/pystormtracker/preprocessing/spectral.py +++ b/src/pystormtracker/preprocessing/spectral.py @@ -33,7 +33,7 @@ def _resolve_engine( ) -> Literal["shtns", "ducc0"]: """Resolves 'auto' engine to the best available backend.""" if sht_engine == "auto": - return "shtns" if SHTNS_AVAILABLE else "ducc0" + return "ducc0" return sht_engine From 82037d82d31750139d581c44d4357f82f0e6e3a2 Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Tue, 24 Mar 2026 23:55:40 -0400 Subject: [PATCH 8/9] docs(preprocessing): add technical documentation for spherical harmonic transforms and tapering --- .../preprocessing/kinematics.py | 25 +++++++++++------ src/pystormtracker/preprocessing/spectral.py | 27 +++++++++++++------ src/pystormtracker/preprocessing/taper.py | 18 ++++++++++--- 3 files changed, 51 insertions(+), 19 deletions(-) diff --git a/src/pystormtracker/preprocessing/kinematics.py b/src/pystormtracker/preprocessing/kinematics.py index cfee8b2c..8e8d8ac8 100644 --- a/src/pystormtracker/preprocessing/kinematics.py +++ b/src/pystormtracker/preprocessing/kinematics.py @@ -59,18 +59,25 @@ def compute_vort_div( resolved_engine = "ducc0" if sht_engine == "auto" else sht_engine if resolved_engine == "ducc0": + # mmax calculation follows the sampling theorem: for a longitude grid of + # size nphi, the maximum resolvable wavenumber m is (nphi-1)//2 to avoid + # aliasing. 
mmax = min(lmax, (nphi - 1) // 2) # Standard spectral derivation from wind components: - # V = (u, v) + # For a vector field V = (u, v) on a sphere: # div = 1/(R cos lat) * (du/dlon + d(v cos lat)/dlat) # vo = 1/(R cos lat) * (dv/dlon - d(u cos lat)/dlat) - # In spectral space, for a vector field expanded into E/B modes: - # div_lm = -sqrt(l(l+1))/R * E_lm - # vort_lm = sqrt(l(l+1))/R * B_lm - - # Analysis - # v_theta = v, v_phi = u (matches NCL Spherepack convention) + # + # In spherical harmonic space, using E (divergence-like) and + # B (vorticity-like) modes: + # div_lm = -[sqrt(l(l+1))/R] * E_lm + # vort_lm = [sqrt(l(l+1))/R] * B_lm + # where the [sqrt(l(l+1))/R] term is the eigenvalue of the vector Laplacian. + # Analysis: + # ducc0.sht.analysis_2d expects (v_theta, v_phi) for spin-1 (vector) fields. + # v_theta (meridional) is v, v_phi (zonal) is u. + # This matches the convention used in NCL's Spherepack wrappers. vec_map = np.stack((v, u), axis=0).astype(np.float64) alm_vec = ducc0.sht.analysis_2d( map=vec_map, @@ -83,7 +90,9 @@ def compute_vort_div( alm_E = alm_vec[0] alm_B = alm_vec[1] - # Spectral Scaling + # Spectral Scaling: + # We apply the eigenvalue of the gradient/curl operators in spectral space. + # l_arr contains the degree 'l' for each coefficient in the alm array. l_arr = np.concatenate([np.arange(m, lmax + 1) for m in range(mmax + 1)]) eigen_scale = np.sqrt(l_arr * (l_arr + 1.0)) / R alm_div = -eigen_scale * alm_E diff --git a/src/pystormtracker/preprocessing/spectral.py b/src/pystormtracker/preprocessing/spectral.py index 7116fbc9..b27de738 100644 --- a/src/pystormtracker/preprocessing/spectral.py +++ b/src/pystormtracker/preprocessing/spectral.py @@ -77,14 +77,17 @@ def _get_shtns_plan(nlat: int, nlon: int, lmax: int) -> shtns.sht: key = (nlat, nlon, lmax) if key not in _thread_local.cache: - # SHTns can handle lmax beyond sampling theorem for analysis, - # but results might be aliased if not careful. 
- # NCL/Spherepack T42 uses lmax=42 for 73x144. + # mmax is derived from the longitude grid resolution to satisfy the + # sampling theorem (mmax < nlon/2). mmax = min(lmax, nlon // 2 - 1) - # Use 4pi normalization to match NCL/Spherepack and Hodges TRACK. + # Use 4pi normalization (shtns.sht_fourpi) to ensure parity with + # NCL/Spherepack and Hodges TRACK. This ensures coefficients are + # scaled such that they represent the amplitude of the harmonics. sh = shtns.sht(lmax, mmax, norm=shtns.sht_fourpi) - # shtns.sht_reg_poles is the standard equidistant latitude grid including poles. + + # shtns.sht_reg_poles is the standard equidistant latitude grid + # including poles, which is the most common format for climate data (e.g. ERA5). # SHT_PHI_CONTIGUOUS matches standard NumPy/C row-major layout (nlat, nlon). sh.set_grid(nlat, nlon, shtns.sht_reg_poles | shtns.SHT_PHI_CONTIGUOUS) _thread_local.cache[key] = sh @@ -100,6 +103,7 @@ def _filter_shtns_frame( frame = frame[::-1, :] nlat, nlon = frame.shape + # Basic check against the sampling theorem limit for the latitude grid. grid_lmax = (nlat - 1) // 2 if lmin > grid_lmax: raise ValueError( @@ -116,7 +120,7 @@ def _filter_shtns_frame( # Forward transform (Spatial -> Spectral) ylm = sh.analys(frame) - # Apply Bandpass Mask + # Apply Bandpass Mask (Zero out coefficients outside [lmin, lmax]) mask = (sh.l < lmin) | (sh.l > lmax) ylm[mask] = 0.0 @@ -142,7 +146,9 @@ def _filter_ducc0_frame( nlat, nlon = frame.shape - # Regular equidistant grid + # geometry='CC' (Clenshaw-Curtis) assumes an equidistant grid including + # the poles, matching standard lat-lon climate data. + # For Gaussian grids, 'GL' (Gauss-Legendre) would be more appropriate. geometry = "CC" mmax = min(lmax, nlon // 2 - 1) @@ -157,6 +163,7 @@ def _filter_ducc0_frame( ) # Apply Bandpass Mask + # Coefficients are stored in a packed format: for each m, l goes from m to lmax. 
l_arr = np.concatenate([np.arange(m, lmax + 1) for m in range(mmax + 1)]) if lmin > 0: mask = l_arr < lmin @@ -188,7 +195,9 @@ class SpectralFilter: """ Spectral bandpass filter (truncation) for lat-lon grid data. - Backends (in order of preference): shtns, ducc0. + Supported backends: + - ducc0: Default engine; C++ library with no external C dependencies, easy installation. + - shtns: Optional high-performance alternative, requires C library installation. """ def __init__( @@ -206,6 +215,8 @@ def __init__( lmax (int): Maximum total wave number to retain. lat_reverse (bool): If True, assume latitude is stored from South to North. sht_engine (str): Engine to use ('auto', 'shtns', 'ducc0'). + - 'auto' resolves to 'ducc0', the default backend. + - 'shtns' is an optional high-performance alternative. """ self.lmin = lmin self.lmax = lmax diff --git a/src/pystormtracker/preprocessing/taper.py b/src/pystormtracker/preprocessing/taper.py index dc2b7b33..b133500d 100644 --- a/src/pystormtracker/preprocessing/taper.py +++ b/src/pystormtracker/preprocessing/taper.py @@ -8,8 +8,12 @@ class TaperFilter: """ A filter that applies a cosine taper to the edges of the domain. + This is often used in TRACK before spherical harmonic filtering to - minimize ringing at the boundaries. + minimize the Gibbs phenomenon (ringing artifacts) at the boundaries. + Since spherical harmonic transforms effectively assume periodicity or + symmetry, abrupt changes at the boundaries (e.g. between the North + Pole and the first latitude row) can introduce high-frequency noise. def __init__(self, n_points: int = 10) -> None: @@ -18,6 +22,8 @@ def __init__(self, n_points: int = 10) -> None: Args: n_points (int): Number of points to taper at the edges. + Higher values provide smoother transitions but sacrifice + more of the physical data at the boundaries.
""" self.n_points = n_points @@ -89,7 +95,12 @@ def _filter_xarray(self, data: xr.DataArray) -> xr.DataArray: return data * mask def _get_taper(self, n: int) -> NDArray[np.float64]: - """Generates a 1D cosine taper vector.""" + """ + Generates a 1D cosine taper vector. + + The taper uses a raised cosine (Hanning-like) window at the + boundaries to smoothly transition from 0 to 1. + """ taper = np.ones(n, dtype=np.float64) if self.n_points <= 0: return taper @@ -97,7 +108,8 @@ def _get_taper(self, n: int) -> NDArray[np.float64]: # Ensure n_points doesn't exceed half the dimension size n_eff = min(self.n_points, n // 2) - # Cosine taper from 0 to 1 over n_eff points + # Cosine taper from 0 to 1 over n_eff points: + # w = 0.5 * (1 - cos(pi * i / n_eff)) weights = 0.5 * (1.0 - np.cos(np.pi * np.arange(n_eff) / n_eff)) taper[:n_eff] = weights From adb3819d643e752614d58248042aba63b2459cfd Mon Sep 17 00:00:00 2001 From: Albert Yau <5298134+mwyau@users.noreply.github.com> Date: Wed, 25 Mar 2026 00:08:33 -0400 Subject: [PATCH 9/9] build(mypy): globally ignore missing imports for ducc0 --- pyproject.toml | 2 +- src/pystormtracker/preprocessing/kinematics.py | 2 +- src/pystormtracker/preprocessing/spectral.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 233a18f3..5d88eb9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -123,7 +123,7 @@ disallow_untyped_calls = false exclude = ["worktrees", ".venv", "notebooks"] [[tool.mypy.overrides]] -module = ["dask.*", "numba.*"] +module = ["dask.*", "numba.*", "ducc0", "ducc0.*"] ignore_missing_imports = true [tool.pytest.ini_options] diff --git a/src/pystormtracker/preprocessing/kinematics.py b/src/pystormtracker/preprocessing/kinematics.py index 8e8d8ac8..c7f944f1 100644 --- a/src/pystormtracker/preprocessing/kinematics.py +++ b/src/pystormtracker/preprocessing/kinematics.py @@ -2,7 +2,7 @@ from typing import Literal, TypedDict, overload -import ducc0 # type: 
ignore[import-not-found] +import ducc0 import numpy as np import xarray as xr from numpy.typing import NDArray diff --git a/src/pystormtracker/preprocessing/spectral.py b/src/pystormtracker/preprocessing/spectral.py index b27de738..a5add1ec 100644 --- a/src/pystormtracker/preprocessing/spectral.py +++ b/src/pystormtracker/preprocessing/spectral.py @@ -6,7 +6,7 @@ from collections.abc import Callable from typing import Literal, TypedDict, cast, overload -import ducc0 # type: ignore[import-not-found] +import ducc0 import numpy as np import xarray as xr from numpy.typing import NDArray