diff --git a/src/opera_utils/_cmr.py b/src/opera_utils/_cmr.py new file mode 100644 index 0000000..79bc5d6 --- /dev/null +++ b/src/opera_utils/_cmr.py @@ -0,0 +1,124 @@ +# opera_utils/tropo.py +from __future__ import annotations + +import logging +from collections.abc import Iterable +from datetime import datetime, timezone +from enum import Enum +from typing import Any, Literal + +import requests + +logger = logging.getLogger("opera_utils") + +# CMR short name for TROPO v1 +TROPO_SHORT_NAME = "OPERA_L4_TROPO-ZENITH_V1" + + +Kind = Literal[ + "GET DATA", + "GET DATA VIA DIRECT ACCESS", + "EXTENDED METADATA", + "GET RELATED VISUALIZATION", + "VIEW RELATED INFORMATION", # s3 temporary credential url +] + + +class UrlType(str, Enum): + """Preferred data access protocol.""" + + HTTPS = "https" + S3 = "s3" + + def __str__(self) -> str: + return self.value + + +def _parse_dt(s: str) -> datetime: + # CMR returns timestamps like "2016-07-01T00:00:00Z" + return datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc) + + +def _cmr_search( + *, + short_name: str, + start_datetime: datetime | None = None, + end_datetime: datetime | None = None, + attributes: Iterable[str] | None = None, + use_uat: bool = False, +) -> list[dict[str, Any]]: + """Query the CMR for granules matching the product type and date range. + + Parameters + ---------- + short_name : str + Name of CMR data set. + start_datetime : datetime, optional + The start of the temporal range in UTC. + end_datetime : datetime, optional + The end of the temporal range in UTC. + attributes : Iterable[str], optional + Filters to use in the CMR query. + use_uat : bool + Whether to use the UAT environment instead of main Earthdata endpoint. + + Returns + ------- + list + raw UMM granule dicts for a collection. 
+ + """ + edl_host = "uat.earthdata" if use_uat else "earthdata" + base = f"https://cmr.{edl_host}.nasa.gov/search/granules.umm_json" + page_size: int = 500 + + params: dict[str, Any] = {"short_name": short_name, "page_size": page_size} + if attributes: + params["attribute[]"] = list(attributes) + + if start_datetime or end_datetime: + # Let CMR do initial temporal filtering + start_str = start_datetime.isoformat() if start_datetime else "" + end_str = end_datetime.isoformat() if end_datetime else "" + params["temporal"] = f"{start_str},{end_str}" + + headers: dict[str, str] = {} + out: list[dict[str, Any]] = [] + + while True: + resp = requests.get(base, params=params, headers=headers, timeout=60) + resp.raise_for_status() + payload = resp.json() + items = payload.get("items", []) + out.extend([it.get("umm", {}) for it in items]) + + # pagination + sa = resp.headers.get("CMR-Search-After") + if not sa: + break + headers["CMR-Search-After"] = sa + + return out + + +def _pick_related_url( + umm: dict[str, Any], + *, + kind: Kind, + startswith: str | None = None, + endswith: str | None = None, +) -> str | None: + for item in umm.get("RelatedUrls", []) or []: + if not isinstance(item, dict): + continue + if item.get("Type") != kind: + continue + url = item.get("URL") + if not url: + continue + if startswith and not url.startswith(startswith): + continue + if endswith and not url.endswith(endswith): + continue + return url + return None diff --git a/src/opera_utils/_cslc.py b/src/opera_utils/_cslc.py index a5f8ac3..131ae87 100644 --- a/src/opera_utils/_cslc.py +++ b/src/opera_utils/_cslc.py @@ -17,8 +17,6 @@ from shapely import geometry, ops, wkt try: - from isce3.core import DateTime, Orbit, StateVector - HAS_ICE3 = True except ImportError: HAS_ICE3 = False @@ -388,9 +386,11 @@ def get_cslc_orbit(h5file: Filename): Orbit object. 
""" - if not HAS_ICE3: + try: + from isce3.core import DateTime, Orbit, StateVector # noqa: PLC0415 + except ImportError as e: msg = "isce3 must be installed to use this function" - raise ImportError(msg) + raise ImportError(msg) from e times, positions, velocities, reference_epoch = get_orbit_arrays(h5file) orbit_svs = [] diff --git a/src/opera_utils/cli.py b/src/opera_utils/cli.py index 6ce98cf..f592131 100644 --- a/src/opera_utils/cli.py +++ b/src/opera_utils/cli.py @@ -151,10 +151,10 @@ def cli_app() -> None: try: from opera_utils.disp._download import run_download from opera_utils.disp._reformat import reformat_stack - from opera_utils.disp._search import search + from opera_utils.disp._search import search as search_disp cli_dict["disp-s1-download"] = run_download - cli_dict["disp-s1-search"] = partial(search, print_urls=True) + cli_dict["disp-s1-search"] = partial(search_disp, print_urls=True) cli_dict["disp-s1-reformat"] = reformat_stack except ImportError: @@ -163,7 +163,9 @@ def cli_app() -> None: try: from opera_utils.tropo._apply import apply_tropo from opera_utils.tropo._crop import crop_tropo + from opera_utils.tropo._search import search as search_tropo + cli_dict["tropo-search"] = search_tropo cli_dict["tropo-crop"] = crop_tropo cli_dict["tropo-apply"] = apply_tropo diff --git a/src/opera_utils/disp/_product.py b/src/opera_utils/disp/_product.py index 5ec7647..077e056 100644 --- a/src/opera_utils/disp/_product.py +++ b/src/opera_utils/disp/_product.py @@ -5,7 +5,6 @@ from collections.abc import Iterable, Iterator from dataclasses import asdict, dataclass from datetime import datetime, timezone -from enum import Enum from functools import cached_property from math import nan from pathlib import Path @@ -18,6 +17,7 @@ from affine import Affine from typing_extensions import Self +from opera_utils._cmr import UrlType from opera_utils.burst_frame_db import ( Bbox, OrbitPass, @@ -29,17 +29,7 @@ from ._utils import get_frame_coordinates -__all__ = 
["DispProduct", "DispProductStack", "UrlType"] - - -class UrlType(str, Enum): - """Choices for the orbit direction of a granule.""" - - S3 = "s3" - HTTPS = "https" - - def __str__(self) -> str: - return str(self.value) +__all__ = ["DispProduct", "DispProductStack"] @dataclass diff --git a/src/opera_utils/disp/_search.py b/src/opera_utils/disp/_search.py index 711fd85..e53db42 100644 --- a/src/opera_utils/disp/_search.py +++ b/src/opera_utils/disp/_search.py @@ -14,8 +14,7 @@ import warnings from datetime import datetime, timezone -import requests - +from opera_utils._cmr import _cmr_search from opera_utils.disp._product import DispProduct, UrlType __all__ = ["search"] @@ -59,8 +58,6 @@ def search( List of products matching the search criteria """ - edl_host = "uat.earthdata" if use_uat else "earthdata" - search_url = f"https://cmr.{edl_host}.nasa.gov/search/granules.umm_json" params: dict[str, int | str | list[str]] = { "short_name": "OPERA_L3_DISP-S1_V1", "page_size": 500, @@ -93,32 +90,18 @@ def search( product_filters.append(f"int,FRAME_NUMBER,{frame_id}") else: warnings.warn("No `frame_id` specified: search may be large", stacklevel=1) - - headers: dict[str, str] = {} - products: list[DispProduct] = [] - while True: - response = requests.get(search_url, params=params, headers=headers) - response.raise_for_status() - data = response.json() - cur_products = [ - DispProduct.from_umm(item["umm"], url_type=url_type) - for item in data["items"] - ] - # CMR filters apply to both the reference and secondary time (as of 2025-03-29) - # We want to filter just by the secondary time - products.extend( - [ - g - for g in cur_products - if start_datetime <= g.secondary_datetime <= end_datetime - ] - ) - - if "CMR-Search-After" not in response.headers: - break - - headers["CMR-Search-After"] = response.headers["CMR-Search-After"] - + results = _cmr_search( + short_name="OPERA_L3_DISP-S1_V1", + start_datetime=start_datetime, + end_datetime=end_datetime, + 
attributes=product_filters, + use_uat=use_uat, + ) + products = [DispProduct.from_umm(r, url_type=url_type) for r in results] + + products = [ + p for p in products if start_datetime <= p.secondary_datetime <= end_datetime + ] # Return sorted list of products products = sorted(products, key=lambda g: (g.frame_id, g.secondary_datetime)) if print_urls: diff --git a/src/opera_utils/disp/plate_motion.py b/src/opera_utils/disp/plate_motion.py new file mode 100644 index 0000000..a8eb94a --- /dev/null +++ b/src/opera_utils/disp/plate_motion.py @@ -0,0 +1,320 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "numpy", +# "pyproj", +# "rioxarray", +# "tyro", +# ] +# /// +"""Stand-alone plate-motion utilities and CLI. + +This module computes rigid plate-motion velocities at geodetic coordinates using +an Euler pole, projecting to the line-of-sight (LOS) given an east/north/up raster. + +Conventions +----------- +- Angular velocity: + * Cartesian: (wx, wy, wz) in milliarcseconds/year (mas/yr) + * Spherical: (lat [deg], lon [deg], omega [deg/Ma]) +- Coordinates: + * Geodetic latitude/longitude are in degrees (WGS84 ellipsoid). + * Altitude is meters above ellipsoid (default 0 m). +- ENU frame: + * East, North, Up (local topocentric) at each (lat, lon). +- LOS projection: + * The LOS unit vector points **from ground toward the sensor**. + +Examples +-------- + uv run --script plate_motion.py \ + --los-enu los_enu.tif + --plate-name NorthAmerica \ + --out plate_motion_in_los.tif + + +References +---------- +Mintpy Euler Pole and plate_motion scripts: +https://github.com/insarlab/MintPy/blob/3fcf395b9ebaf6a9b12dc5d29d175fb0fa649e9f/src/mintpy/plate_motion.py +Original authors: Yuan-Kai Liu, Zhang Yunjun, May 2022 +Stephenson, O. L., Liu, Y. K., Yunjun, Z., Simons, M., Rosen, P. and Xu, X., (2022), + The Impact of Plate Motions on Long-Wavelength InSAR-Derived Velocity Fields, + Geophys. Res. Lett. 49, e2022GL099835, doi:10.1029/2022GL099835. 
+ +""" + +from __future__ import annotations + +from dataclasses import dataclass +from enum import StrEnum +from pathlib import Path + +import numpy as np +import pyproj +import rioxarray as rxr +import tyro + +MAS_TO_RAD = np.pi / (180.0 * 3_600_000.0) # 1 mas = this many radians +DEGMA_TO_MASPYR = 1_000_000.0 / 3_600_000.0 # deg/Ma -> mas/yr +WGS84 = pyproj.CRS.from_epsg(4979) # geodetic 3D +ECEF = pyproj.CRS.from_epsg(4978) # ECEF + + +@dataclass(frozen=True) +class PlateITRF2014: + """ITRF2014 plate angular velocity in mas/yr.""" + + abbrev: str + omega_x: float + omega_y: float + omega_z: float + + +# Table from Altamimi et al. (2017) +ITRF2014_PMM: dict[str, PlateITRF2014] = { + "antartica": PlateITRF2014("ANTA", -0.248, -0.324, 0.675), + "arabia": PlateITRF2014("ARAB", 1.154, -0.136, 1.444), + "australia": PlateITRF2014("AUST", 1.510, 1.182, 1.215), + "eurasia": PlateITRF2014("EURA", -0.085, -0.531, 0.770), + "india": PlateITRF2014("INDI", 1.154, -0.005, 1.454), + "nazca": PlateITRF2014("NAZC", -0.333, -1.544, 1.623), + "northamerica": PlateITRF2014("NOAM", 0.024, -0.694, -0.063), + "nubia": PlateITRF2014("NUBI", 0.099, -0.614, 0.733), + "pacific": PlateITRF2014("PCFC", -0.409, 1.047, -2.169), + "southamerica": PlateITRF2014("SOAM", -0.270, -0.301, -0.140), + "somalia": PlateITRF2014("SOMA", -0.121, -0.794, 0.884), +} +PLATE_NAMES = list(ITRF2014_PMM.keys()) + + +PlateName = StrEnum("PlateName", PLATE_NAMES) # type: ignore[misc] + + +def cart2sph( + rx: np.ndarray, ry: np.ndarray, rz: np.ndarray +) -> tuple[np.ndarray, np.ndarray, np.ndarray]: + """Cartesian → spherical. + + Parameters + ---------- + rx, ry, rz : array_like + Components in an arbitrary radial unit. + + Returns + ------- + lat_deg, lon_deg, r : ndarray + Latitude [deg], longitude [deg], radius [same unit as inputs]. 
+ + """ + r = np.sqrt(rx * rx + ry * ry + rz * rz) + lat = np.rad2deg(np.arcsin(rz / r)) + lon = np.rad2deg(np.arctan2(ry, rx)) + return lat, lon, r + + +def sph2cart( + lat_deg: np.ndarray, lon_deg: np.ndarray, r: np.ndarray | float = 1.0 +) -> tuple[np.ndarray, np.ndarray, np.ndarray]: + """Spherical → Cartesian. + + Parameters + ---------- + lat_deg, lon_deg : array_like + Latitude/longitude in degrees. + r : array_like or float, default=1 + Radius in any unit. + + Returns + ------- + rx, ry, rz : ndarray + Cartesian components with same unit as r. + + """ + lat = np.deg2rad(lat_deg) + lon = np.deg2rad(lon_deg) + clat = np.cos(lat) + return ( + r * clat * np.cos(lon), + r * clat * np.sin(lon), + r * np.sin(lat), + ) + + +def geodetic_to_ecef( + lat_deg: np.ndarray, + lon_deg: np.ndarray, + alt_m: np.ndarray | float = 0, +) -> tuple[np.ndarray, np.ndarray, np.ndarray]: + """WGS84 geodetic (lat, lon, h) → ECEF (x, y, z), in meters.""" + transformer = pyproj.Transformer.from_crs(WGS84, ECEF, always_xy=True) + x, y, z = transformer.transform( + np.radians(lon_deg), np.radians(lat_deg), np.broadcast_to(alt_m, lon_deg.shape) + ) + return np.asarray(x), np.asarray(y), np.asarray(z) + + +def ecef_to_enu_components( + lat_deg: np.ndarray, + lon_deg: np.ndarray, + x: np.ndarray, + y: np.ndarray, + z: np.ndarray, +) -> tuple[np.ndarray, np.ndarray, np.ndarray]: + """Rotate ECEF vectors into ENU at each (lat, lon). + + Parameters + ---------- + lat_deg, lon_deg : array_like + Geodetic latitude/longitude [deg]. + x, y, z : array_like + ECEF vector components (e.g., velocity) with consistent units. + + Returns + ------- + e, n, u : ndarray + ENU components in same units as inputs. 
+ + """ + lat = np.deg2rad(lat_deg) + lon = np.deg2rad(lon_deg) + slat, clat = np.sin(lat), np.cos(lat) + slon, clon = np.sin(lon), np.cos(lon) + + e = -slon * x + clon * y + n = -slat * clon * x - slat * slon * y + clat * z + u = clat * clon * x + clat * slon * y + slat * z + return e, n, u + + +@dataclass +class EulerPole: + """Euler pole / angular velocity. + + Parameters + ---------- + wx_mas_yr, wy_mas_yr, wz_mas_yr + Cartesian angular velocity components in mas/yr. + + Notes + ----- + Internally we store Cartesian mas/yr. + + """ + + wx_mas_yr: float + wy_mas_yr: float + wz_mas_yr: float + + def velocity_xyz_m_per_yr( + self, + lat_deg: np.ndarray, + lon_deg: np.ndarray, + alt_m: float | np.ndarray | None = None, + ) -> tuple[np.ndarray, np.ndarray, np.ndarray]: + """ECEF velocity induced by Euler rotation at positions. + + Returns + ------- + vx, vy, vz : ndarray + ECEF velocity components [m/yr]. + + """ + omega_rad_per_yr = ( + np.asarray([self.wx_mas_yr, self.wy_mas_yr, self.wz_mas_yr], dtype=float) + * MAS_TO_RAD + ) + + x, y, z = geodetic_to_ecef(lat_deg, lon_deg, alt_m) + xyz = np.stack([x, y, z], axis=0) # (3, N) + # Cross product omega x r for each point + _, rows, cols = xyz.shape + vx, vy, vz = np.cross( + omega_rad_per_yr[:, None], xyz.reshape(3, -1), axis=0 + ).reshape(3, rows, cols) + return vx, vy, vz + + def velocity_enu_m_per_yr( + self, lat_deg: np.ndarray, lon_deg: np.ndarray, alt_m: float | np.ndarray = 0.0 + ) -> tuple[np.ndarray, np.ndarray, np.ndarray]: + """ENU velocity induced by Euler rotation at positions. + + Returns + ------- + ve, vn, vu : ndarray + East/North/Up velocities [m/yr]. 
+ + """ + vx, vy, vz = self.velocity_xyz_m_per_yr(lat_deg, lon_deg, alt_m) + return ecef_to_enu_components(lat_deg, lon_deg, vx, vy, vz) + + +def _build_euler_from_inputs(plate_name: PlateName) -> EulerPole: + """Construct EulerPole from CLI options.""" + plate = ITRF2014_PMM.get(plate_name.value) + if plate is None: + msg = f"Unknown ITRF2014 plate: {plate_name}. Options: {list(ITRF2014_PMM)}" + raise ValueError(msg) + return EulerPole( + wx_mas_yr=plate.omega_x, wy_mas_yr=plate.omega_y, wz_mas_yr=plate.omega_z + ) + + +def run( + los_enu_path: str, + plate_name: PlateName, + out: str | None = "rotation_los_enu.tif", + match_file: Path | str | None = None, + subsample: int = 10, +) -> None: + """Compute plate-motion computation in the radar line-of-sight. + + Parameters + ---------- + los_enu_path : Path | str + Path / url to line-of-sight 3-band east, north, up raster + plate_name : str + Name of plate in ITRF2014 table + out : str, optional + Output LOS velocity GeoTIFF + match_file : Path | str, optional + If provided, outputs `out` to match the size and projection of `match_file`. + Otherwise, uses `subsample` and outputs in EPSG:4326. + subsample : int + Decimation factor to apply in x and y before computation. 
+ Default is 10 (Output is 100x smaller than `los_enu_path`) + + """ + da_los_enu = rxr.open_rasterio(los_enu_path, default_name="los_enu") + assert not isinstance(da_los_enu, list) + + if match_file is not None: + da_match = rxr.open_rasterio(match_file) + da_los_enu_warped = da_los_enu.rio.reproject_match(da_match) + else: + da_los_enu = da_los_enu[:, ::subsample, ::subsample].astype("float32") + da_los_enu_warped = da_los_enu.rio.reproject("epsg:4326") + + # Create EulerPole if given + euler = _build_euler_from_inputs(plate_name) + + # Coordinates for DEM grid + lon, lat = np.meshgrid( + da_los_enu_warped.y.values, da_los_enu_warped.x.values, indexing="ij" + ) + lon = lon.astype("float32") + lat = lat.astype("float32") + + # Compute ENU rotation component (m/yr) + ve_rot, vn_rot, vu_rot = euler.velocity_enu_m_per_yr(lat, lon, alt_m=0.0) + v_enu = np.stack([ve_rot, vn_rot, vu_rot]) + + da_v_los = np.sum(v_enu * da_los_enu_warped, axis=0) + da_v_los.attrs["units"] = "meters / year" + da_v_los.rio.write_nodata(0).rio.to_raster( + out, dtype="float32", tiled="yes", compress="deflate" + ) + + +if __name__ == "__main__": + tyro.cli(run) diff --git a/src/opera_utils/tropo/__init__.py b/src/opera_utils/tropo/__init__.py index c094b7c..dab58fb 100644 --- a/src/opera_utils/tropo/__init__.py +++ b/src/opera_utils/tropo/__init__.py @@ -2,5 +2,6 @@ from ._apply import apply_tropo from ._crop import crop_tropo +from ._search import search __all__ = ["apply_tropo", "crop_tropo"] diff --git a/src/opera_utils/tropo/_product.py b/src/opera_utils/tropo/_product.py new file mode 100644 index 0000000..11683f1 --- /dev/null +++ b/src/opera_utils/tropo/_product.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +import logging +from dataclasses import dataclass +from datetime import datetime +from typing import Any + +from opera_utils._cmr import UrlType, _pick_related_url + +__all__ = ["TropoProduct"] + +logger = logging.getLogger("opera_utils") + +TROPO_SHORT_NAME = 
"OPERA_L4_TROPO-ZENITH_V1" + + +@dataclass(frozen=True) +class TropoProduct: + """Parsed OPERA L4 TROPO-ZENITH granule metadata (one 6-hour global field).""" + + granule_ur: str + start: datetime + end: datetime + product_version: str | None + + # URLs (if present) + https_url: str | None + s3_url: str | None + browse_png_url: str | None + + size_in_bytes: int | None + + @property + def mid_datetime(self) -> datetime: + return self.start + (self.end - self.start) / 2 + + @property + def cadence_hours(self) -> float: + return (self.end - self.start).total_seconds() / 3600.0 + + def url(self, preferred: UrlType = UrlType.HTTPS) -> str | None: + if preferred is UrlType.S3 and self.s3_url: + return self.s3_url + return self.https_url + + @property + def filename(self) -> str | None: + u = self.url(UrlType.HTTPS) or self.url(UrlType.S3) + return None if u is None else u.split("/")[-1] + + @staticmethod + def from_umm(umm: dict[str, Any]) -> TropoProduct: + # Temporal + rng = umm.get("TemporalExtent", {}).get("RangeDateTime", {}) + start = datetime.fromisoformat(rng["BeginningDateTime"]) + end = datetime.fromisoformat(rng["EndingDateTime"]) + + granule_ur: str = umm["GranuleUR"] + + # Additional attributes + aa = umm.get("AdditionalAttributes", []) or [] + aa_map = {item.get("Name"): (item.get("Values") or [None])[0] for item in aa} + product_version = aa_map.get("PRODUCT_VERSION") + + # URLs + https_url = _pick_related_url(umm, kind="GET DATA", startswith="https") + s3_url = _pick_related_url( + umm, kind="GET DATA VIA DIRECT ACCESS", startswith="s3" + ) + browse_png_url = _pick_related_url( + umm, kind="GET RELATED VISUALIZATION", endswith=".png" + ) + + # DataGranule / size / md5 + archive_info = ( + umm.get("DataGranule", {}).get("ArchiveAndDistributionInformation", []) + or [] + ) + size_in_bytes = None + if archive_info: + # First entry is typically the .nc + primary = archive_info[0] + size_in_bytes = primary.get("SizeInBytes") + + return TropoProduct( + 
granule_ur=granule_ur, + start=start, + end=end, + product_version=product_version, + https_url=https_url, + s3_url=s3_url, + browse_png_url=browse_png_url, + size_in_bytes=size_in_bytes, + ) diff --git a/src/opera_utils/tropo/_search.py b/src/opera_utils/tropo/_search.py new file mode 100644 index 0000000..b01745c --- /dev/null +++ b/src/opera_utils/tropo/_search.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +import logging +from datetime import datetime + +from opera_utils._cmr import UrlType, _cmr_search + +from ._product import TropoProduct + +__all__ = ["search"] + +logger = logging.getLogger("opera_utils") + +# CMR short name for TROPO v1 +TROPO_SHORT_NAME = "OPERA_L4_TROPO-ZENITH_V1" + + +def search( + *, + product_version: str | None = None, + start_datetime: datetime | None = None, + end_datetime: datetime | None = None, + url_type: UrlType = UrlType.HTTPS, + use_uat: bool = False, + print_urls: bool = False, +) -> list[TropoProduct]: + """Search CMR for OPERA TROPO-ZENITH granules and return parsed products. + + Notes + ----- + If no start/end are given, CMR's full record is returned (can be large). + + Examples + -------- + >>> prods = search( + ... start_datetime=datetime(2016, 7, 1, tzinfo=timezone.utc), + ... end_datetime=datetime(2016, 7, 2, tzinfo=timezone.utc), + ... ) + >>> prods[0].url() # HTTPS by default + 'https://cumulus.asf.earthdatacloud.nasa.gov/OPERA/.../OPERA_L4_TROPO-ZENITH_....nc' + + """ + attrs: list[str] = [] + if product_version: + if not product_version.startswith("v"): + product_version = "v" + product_version + # here values are strings (e.g., "v1.0"), so use string matching. 
+ attrs.append(f"string,PRODUCT_VERSION,{product_version}") + + # Fetch raw UMMs + umms = _cmr_search( + short_name=TROPO_SHORT_NAME, + start_datetime=start_datetime, + end_datetime=end_datetime, + attributes=attrs or None, + use_uat=use_uat, + ) + + # Parse the raw UMM records and sort chronologically by granule start time + prods = [TropoProduct.from_umm(u) for u in umms] + prods.sort(key=lambda p: p.start) + + if print_urls: + for p in prods: + u = p.url(url_type) or p.url(UrlType.HTTPS) or p.url(UrlType.S3) + if u: + print(u) + + return prods