From 89f618c601ecbf3f6704207c62f0031f1af6da34 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Thu, 22 Jan 2026 16:12:38 -0500 Subject: [PATCH 01/14] Specify media type for query requests --- ogc/edr/edr_routes.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ogc/edr/edr_routes.py b/ogc/edr/edr_routes.py index 35160cd..ffb6544 100644 --- a/ogc/edr/edr_routes.py +++ b/ogc/edr/edr_routes.py @@ -253,8 +253,11 @@ def collection_query( if "fn" in content and "fp" in content: # Return the file name in the header and the content as only the binary data filename = content["fn"] + headers["Content-Type"] = "image/tiff" headers["Content-Disposition"] = f"attachment; filename={filename}" # Decode the content string which is the Base64 representation of the data content = io.BytesIO(base64.b64decode(content["fp"])) + else: + headers["Content-Type"] = "application/prs.coverage+json" return headers, http_status, content From f46921e682e8b27b1736cba84d7cc396ace1f863 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Fri, 30 Jan 2026 14:36:25 +0000 Subject: [PATCH 02/14] Force log level to be set from configuration --- ogc/edr/edr_config.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ogc/edr/edr_config.py b/ogc/edr/edr_config.py index 488f83b..beb3b9e 100644 --- a/ogc/edr/edr_config.py +++ b/ogc/edr/edr_config.py @@ -1,5 +1,6 @@ import os import json +import logging import numpy as np import traitlets as tl from datetime import datetime @@ -53,6 +54,11 @@ def get_configuration(base_url: str, layers: List[pogc.Layer]) -> Dict[str, Any] resources = configuration.get("resources", {}) configuration["resources"] = resources | EdrConfig._resources_definition(base_url, layers) + # Force the log level based on the configuration as it is loaded, otherwise it is ignored + if configuration.get("logging", {}).get("level"): + api_logger = logging.getLogger("pygeoapi") + api_logger.setLevel(configuration["logging"]["level"]) + return configuration @staticmethod From 
9d675b49e28a43cf818f82c409e288dc550d3450 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Fri, 30 Jan 2026 14:37:36 +0000 Subject: [PATCH 03/14] Improve default formatting for requests --- ogc/servers.py | 12 ++++++++---- ogc/settings.py | 6 ++++++ 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/ogc/servers.py b/ogc/servers.py index fe8fda8..efa6213 100755 --- a/ogc/servers.py +++ b/ogc/servers.py @@ -17,6 +17,7 @@ from ogc.ogc_common import WCSException from pygeoapi.api import APIRequest from pygeoapi.util import get_api_rules +from . import settings logger = logging.getLogger(__name__) @@ -193,7 +194,7 @@ def method(): self.add_url_rule( f"/{endpoint}/edr/collections//instances//", endpoint=f"{endpoint}_instance_query", - view_func=self.edr_render(ogc.edr_routes.collection_query), + view_func=self.edr_render(ogc.edr_routes.collection_query, default_format=settings.GEOTIFF), methods=["GET"], strict_slashes=strict_slashes, ) @@ -258,7 +259,7 @@ def ogc_render(self, ogc_idx): ee = WCSException() return respond_xml(ee.to_xml(), status=500) - def edr_render(self, callable: Callable) -> Callable: + def edr_render(self, callable: Callable, default_format: str | None = "json") -> Callable: """Function which returns a wrapper for the provided callable. Filters arguments and handles any necessary exceptions. @@ -266,6 +267,8 @@ def edr_render(self, callable: Callable) -> Callable: ---------- callable : Callable The callable request handler to be wrapped. + default_format: str | None + The optional default data output format, by default "json". 
Returns ------- @@ -302,8 +305,9 @@ def wrapper(*args, **kwargs) -> Response: for (k, v) in request.args.items() } # Replace format with its lowercase version to match pygeoapi expectations - if filtered_args.get("f", None): - filtered_args["f"] = filtered_args["f"].lower() + format_argument = filtered_args.get("f", default_format) + if format_argument is not None: + filtered_args["f"] = format_argument.lower() filtered_args["base_url"] = request.base_url diff --git a/ogc/settings.py b/ogc/settings.py index 61f5843..1cb5686 100755 --- a/ogc/settings.py +++ b/ogc/settings.py @@ -37,6 +37,12 @@ crs_84_uri_format: {"minx": -180.0, "miny": -90.0, "maxx": 180.0, "maxy": 90.0}, } +# EDR query output formats +GEOTIFF = "GeoTIFF" +JSON = "JSON" +COVERAGE_JSON = "CoverageJSON" +EDR_QUERY_FORMATS = [GEOTIFF, COVERAGE_JSON, JSON] + # WMS Capabilities timestamp format USE_TIMES_LIST = False PAST_DAYS_INCLUDED = 7 From d2108abcb88cb759b08b9badad294459b54b793c Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Fri, 30 Jan 2026 14:39:22 +0000 Subject: [PATCH 04/14] Fix coverage metadata based on coordinates --- ogc/edr/edr_provider.py | 32 ++++++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/ogc/edr/edr_provider.py b/ogc/edr/edr_provider.py index dd1530d..ed0c731 100644 --- a/ogc/edr/edr_provider.py +++ b/ogc/edr/edr_provider.py @@ -612,16 +612,32 @@ def to_coverage_json( }, "referencing": [ { - "coordinates": ["lon", "lat"], + "coordinates": ["x", "y"], "system": {"type": "GeographicCRS", "id": crs}, }, - { - "coordinates": ["t"], - "system": { - "type": "TemporalRS", - "calendar": "Gregorian", - }, - }, + *( + [ + { + "coordinates": ["t"], + "system": { + "type": "TemporalRS", + "calendar": "Gregorian", + }, + } + ] + if "time" in coordinates.dims + else [] + ), + *( + [ + { + "coordinates": ["z"], + "system": {"type": "VerticalCRS"}, + } + ] + if "alt" in coordinates.dims + else [] + ), ], "parameters": {}, "ranges": {}, From 
1603eab42e271c76e5d08907241bf67e62aa38a3 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Fri, 30 Jan 2026 14:41:18 +0000 Subject: [PATCH 05/14] Improve EDR based on compliance testing --- ogc/edr/edr_api.py | 350 ++++++++++++++++++++++++++++++ ogc/edr/edr_config.py | 71 ++++-- ogc/edr/edr_provider.py | 318 ++++++++++++++++----------- ogc/edr/edr_routes.py | 14 +- ogc/edr/test/test_edr_config.py | 2 +- ogc/edr/test/test_edr_provider.py | 42 ++-- ogc/edr/test/test_edr_routes.py | 8 +- ogc/podpac.py | 39 +++- 8 files changed, 670 insertions(+), 174 deletions(-) create mode 100644 ogc/edr/edr_api.py diff --git a/ogc/edr/edr_api.py b/ogc/edr/edr_api.py new file mode 100644 index 0000000..87bf92b --- /dev/null +++ b/ogc/edr/edr_api.py @@ -0,0 +1,350 @@ +import json +import numpy as np +import pygeoapi.api +import pygeoapi.api.environmental_data_retrieval as pygeoedr +from http import HTTPStatus +from datetime import datetime, timezone +from typing import Tuple, List, Dict, Any, Union +from ogc import podpac as pogc +from pygeoapi.plugin import load_plugin +from pygeoapi.util import filter_dict_by_key_value, to_json, get_provider_by_type +from pygeoapi.api import API, APIRequest +from pygeoapi.linked_data import jsonldify +from .edr_provider import EdrProvider + + +class EdrAPI: + """Used to modify the default responses before returning data to the user.""" + + @jsonldify + @staticmethod + def landing_page(api: API, request: APIRequest) -> Tuple[dict, int, str]: + """Provide the API landing page. + + Parameters + ---------- + api : API + The API which handles the request. + request : APIRequest + The request object. + + Returns + ------- + Tuple[dict, int, str] + Headers, HTTP Status, and Content returned as a tuple. + """ + return pygeoapi.api.landing_page(api, request) + + @staticmethod + def openapi_(api: API, request: APIRequest) -> Tuple[dict, int, str]: + """Provide the OpenAPI documentation. 
+ + Parameters + ---------- + api : API + The API which handles the request. + request : APIRequest + The request object. + + Returns + ------- + Tuple[dict, int, str] + Headers, HTTP Status, and Content returned as a tuple. + """ + return pygeoapi.api.openapi_(api, request) + + @staticmethod + def conformance(api: API, request: APIRequest) -> Tuple[dict, int, str]: + """Provide the conformance definition. + + Parameters + ---------- + api : API + The API which handles the request. + request : APIRequest + The request object. + + Returns + ------- + Tuple[dict, int, str] + Headers, HTTP Status, and Content returned as a tuple. + """ + return pygeoapi.api.conformance(api, request) + + @jsonldify + @staticmethod + def describe_collections(api: API, request: APIRequest, dataset: str | None = None) -> Tuple[dict, int, str]: + """Provide the collection/collections metadata. + + Overrides default functionality to append additional metadata to collections. + + Parameters + ---------- + api : API + The API which handles the request. + request : APIRequest + The request object. + dataset : str | None, optional + The dataset (collection) to be described or None for all collections, by default None. + + Returns + ------- + Tuple[dict, int, str] + Headers, HTTP Status, and Content returned as a tuple. 
+ """ + headers, status, content = pygeoapi.api.describe_collections(api, request, dataset) + if request.format != pygeoapi.api.F_JSON or status != HTTPStatus.OK: + return headers, status, content + + collection_description = json.loads(content) + collection_configuration = filter_dict_by_key_value(api.config["resources"], "type", "collection") + + if dataset is not None: + collections = [collection_description] + else: + collections = collection_description.get("collections", []) + + for collection in collections: + collection_id = collection["id"] + provider = get_provider_by_type(collection_configuration[collection_id]["providers"], "edr") + provider_plugin = load_plugin("provider", provider) + provider_parameters = provider_plugin.get_fields() + + extents = collection.get("extent", {}) + if "vertical" in collection_configuration[collection_id]["extents"]: + extents = extents | {"vertical": collection_configuration[collection_id]["extents"]["vertical"]} + if "temporal" in collection_configuration[collection_id]["extents"]: + times = collection_configuration[collection_id]["extents"]["temporal"].get("values", []) + trs = collection_configuration[collection_id]["extents"]["temporal"].get("trs") + temporal_extents = EdrAPI._temporal_extents(times, trs) + extents = extents | temporal_extents + collection["extent"] = extents + + collection["output_formats"] = collection_configuration[collection_id].get("output_formats", []) + + height_units = collection_configuration[collection_id].get("height_units", []) + for query_type in collection["data_queries"]: + data_query_additions = { + "query_type": query_type, + **({"height_units": height_units} if query_type == "cube" else {}), + } + variables = collection["data_queries"][query_type]["link"].get("variables", {}) + collection["data_queries"][query_type]["link"]["variables"] = variables | data_query_additions + + for parameter in collection["parameter_names"]: + collection["parameter_names"][parameter] = 
collection["parameter_names"][parameter] | { + "description": provider_parameters[parameter].get("description", "") + } + + return headers, status, to_json(collection_description, api.pretty_print) + + @staticmethod + def get_collection_edr_instances( + api: API, request: APIRequest, dataset: str, instance_id: str | None = None + ) -> Tuple[dict, int, str]: + """Provide the instance/instances metadata. + + Overrides default functionality to append additional metadata to instances. + + Parameters + ---------- + api : API + The API which handles the request. + request : APIRequest + The request object. + dataset : str + The dataset (collection) to be described. + instance_id : str | None, optional + The instance to be described or None for all instances, by default None. + + Returns + ------- + Tuple[dict, int, str] + Headers, HTTP Status, and Content returned as a tuple. + """ + headers, status, content = pygeoedr.get_collection_edr_instances(api, request, dataset, instance_id) + if request.format != pygeoapi.api.F_JSON or status != HTTPStatus.OK: + return headers, status, content + + instance_description = json.loads(content) + collection_configuration = filter_dict_by_key_value(api.config["resources"], "type", "collection") + provider = get_provider_by_type(collection_configuration[dataset]["providers"], "edr") + provider_plugin = load_plugin("provider", provider) + provider_parameters = provider_plugin.get_fields() + base_url = provider["base_url"] + + if instance_id is not None: + instances = [instance_description] + else: + instances = instance_description.get("instances", []) + + collection_layers = EdrProvider.get_layers(base_url, dataset) + + for instance in instances: + collection_id = dataset + + extents = instance.get("extent", {}) + if "vertical" in collection_configuration[collection_id]["extents"]: + extents = extents | {"vertical": collection_configuration[collection_id]["extents"]["vertical"]} + + times = EdrProvider.get_datetimes(collection_layers, 
instance["id"]) + if len(times) > 0: + trs = collection_configuration[collection_id]["extents"]["temporal"].get("trs") + temporal_extents = EdrAPI._temporal_extents(times, trs) + extents = extents | temporal_extents + + bbox = collection_configuration[dataset]["extents"]["spatial"]["bbox"] + if not isinstance(bbox[0], list): + bbox = [bbox] + crs = collection_configuration[dataset]["extents"]["spatial"].get("crs") + spatial_extents = EdrAPI._spatial_extents(bbox, crs) + extents = extents | spatial_extents + instance["extent"] = extents + + instance["output_formats"] = collection_configuration[collection_id].get("output_formats", []) + + height_units = collection_configuration[collection_id].get("height_units") + for query_type in instance["data_queries"]: + data_query_additions = { + "query_type": query_type, + **({"height_units": height_units} if query_type == "cube" else {}), + } + variables = instance["data_queries"][query_type]["link"].get("variables", {}) + instance["data_queries"][query_type]["link"]["variables"] = variables | data_query_additions + + instance["parameter_names"] = EdrAPI._instance_parameters( + collection_layers, provider_parameters, instance["id"] + ) + + return headers, status, to_json(instance_description, api.pretty_print) + + @staticmethod + def get_collection_edr_query( + api: API, + request: APIRequest, + dataset: str, + instance: str | None, + query_type: str, + location_id: str | None = None, + ) -> Tuple[dict, int, str]: + """Query the collection or instance. + + Parameters + ---------- + api : API + The API which handles the request. + request : APIRequest + The request object. + dataset : str + The dataset (collection) to be queried. + instance_id : str | None + The instance to be queried or None if querying a collection. + query_type : str + The query type. + location_id : str | None, optional + Location identifier for location queries or None, by default None. 
+ + Returns + ------- + Tuple[dict, int, str] + Headers, HTTP Status, and Content returned as a tuple. + """ + return pygeoedr.get_collection_edr_query(api, request, dataset, instance, query_type, location_id) + + @staticmethod + def _temporal_extents(times: List[Union[np.datetime64, datetime]], trs: str | None) -> Dict[str, Any]: + """Get the temporal extents for the provided times and reference system. + + Parameters + ---------- + times : List[Union[np.datetime64, datetime]] + Times used to create the temporal extent. + trs : str | None + The reference system for the times. + + Returns + ------- + Dict[str, Any] + The temporal extent object. + """ + iso_times = [] + for time in sorted(times): + dt = time.astype("datetime64[ms]").astype(datetime) if isinstance(time, np.datetime64) else time + if dt.tzinfo is None: + time_utc = dt.replace(tzinfo=timezone.utc) + else: + time_utc = dt.astimezone(timezone.utc) + iso_times.append(time_utc.isoformat().replace("+00:00", "Z")) + + return { + "temporal": { + "interval": [iso_times[0], iso_times[-1]] if len(iso_times) > 0 else [], + "values": iso_times, + "trs": trs, + } + } + + @staticmethod + def _spatial_extents(bbox: List[float], crs: str | None) -> Dict[str, Any]: + """Get the spatial extents for the provided bbox and reference system. + + Parameters + ---------- + bbox : List[float] + Bounding box used to create the spatial extent. + crs : str | None + The reference system for the bounding box. + + Returns + ------- + Dict[str, Any] + The spatial extent object. + """ + return { + "spatial": { + "bbox": bbox, + **({"crs": crs} if crs is not None else {}), + } + } + + @staticmethod + def _instance_parameters( + collection_layers: List[pogc.Layer], provider_parameters: Dict[str, Any], instance: str + ) -> Dict[str, Any]: + """Get the parameter metadata for the specific instance provided. + + Parameters + ---------- + collection_layers : List[pogc.Layer] + The layers available in the collection. 
+ provider_parameters: Dict[str, Any] + The metadata for all available parameters in the collection from provider fields. + instance: str + The instance to determine parameters for. + + Returns + ------- + Dict[str, Any] + The metadata for available parameters in the instance. + """ + instance_parameters = {"parameter_names": {}} + for key, value in provider_parameters.items(): + layer = next((layer for layer in collection_layers if layer.identifier == key), None) + if layer is not None and instance in layer.time_instances(): + instance_parameters[key] = { + "id": key, + "type": "Parameter", + "name": value["title"], + "observedProperty": { + "label": {"id": key, "en": value["title"]}, + }, + "description": value["description"], + "unit": { + "label": {"en": value["title"]}, + "symbol": { + "value": value["x-ogc-unit"], + "type": "http://www.opengis.net/def/uom/UCUM/", + }, + }, + } + return instance_parameters diff --git a/ogc/edr/edr_config.py b/ogc/edr/edr_config.py index beb3b9e..f7fa1fa 100644 --- a/ogc/edr/edr_config.py +++ b/ogc/edr/edr_config.py @@ -1,9 +1,7 @@ import os import json import logging -import numpy as np import traitlets as tl -from datetime import datetime from collections import defaultdict from typing import List, Dict, Tuple, Any from ogc import podpac as pogc @@ -100,6 +98,8 @@ def _resources_definition(base_url: str, layers: List[pogc.Layer]) -> Dict[str, "description": f"Collection of data related to {group_name}", "keywords": ["podpac"], "extents": EdrConfig._generate_extents(group_layers), + "height_units": EdrConfig._vertical_units(group_layers), + "output_formats": settings.EDR_QUERY_FORMATS, "providers": [ { "type": "edr", @@ -112,7 +112,7 @@ def _resources_definition(base_url: str, layers: List[pogc.Layer]) -> Dict[str, settings.epsg_4326_uri_format, ], "format": { - "name": "geotiff", + "name": settings.GEOTIFF, "mimetype": "image/tiff", }, } @@ -138,8 +138,8 @@ def _generate_extents(layers: List[pogc.Layer]) -> Dict[str, Any]: 
The extents dictionary for the layers. """ llc_lon, llc_lat, urc_lon, urc_lat = None, None, None, None - min_time, max_time = None, None - time_range = None + time_range = set() + vertical_range = set() # Determine bounding box which holds all layers for layer in layers: llc_lon_tmp, llc_lat_tmp, urc_lon_tmp, urc_lat_tmp = EdrConfig._wgs84_bounding_box(layer) @@ -151,20 +151,15 @@ def _generate_extents(layers: List[pogc.Layer]) -> Dict[str, Any]: urc_lon = max(urc_lon, urc_lon_tmp) urc_lat = max(urc_lat, urc_lat_tmp) + coordinates_list = layer.get_coordinates_list() + if len(coordinates_list) > 0 and "alt" in coordinates_list[0].udims: + vertical_range.update(coordinates_list[0]["alt"].coordinates) + if hasattr(layer, "valid_times") and layer.valid_times is not tl.Undefined and len(layer.valid_times) > 0: - layer_min_time = np.min(layer.valid_times) - layer_max_time = np.max(layer.valid_times) - if any(time is None for time in [min_time, max_time]): - min_time = layer_min_time - max_time = layer_max_time - else: - min_time = min(min_time, layer_min_time) - max_time = max(max_time, layer_max_time) - - time_range = [ - min_time.isoformat(), - max_time.isoformat(), - ] + time_range.update(layer.valid_times) + + sorted_time_range = sorted(time_range) + sorted_vertical_range = sorted(vertical_range) return { "spatial": { @@ -174,12 +169,24 @@ def _generate_extents(layers: List[pogc.Layer]) -> Dict[str, Any]: **( { "temporal": { - "begin": datetime.fromisoformat(time_range[0]), # start datetime in RFC3339 - "end": datetime.fromisoformat(time_range[-1]), # end datetime in RFC3339 + "begin": sorted_time_range[0], # start datetime in RFC3339 + "end": sorted_time_range[-1], # end datetime in RFC3339 + "values": sorted_time_range, "trs": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian", } } - if time_range is not None + if len(sorted_time_range) > 0 + else {} + ), + **( + { + "vertical": { + "interval": [sorted_vertical_range[0], sorted_vertical_range[-1]], + "values": 
sorted_vertical_range, + "vrs": "http://www.opengis.net/def/uom/EPSG/0/9001", + } + } + if len(sorted_vertical_range) > 0 else {} ), } @@ -208,3 +215,25 @@ def _wgs84_bounding_box(layer: pogc.Layer) -> Tuple[float, float, float, float]: except Exception: crs_extents = settings.EDR_CRS[settings.crs_84_uri_format] return (crs_extents["minx"], crs_extents["miny"], crs_extents["maxx"], crs_extents["maxy"]) + + @staticmethod + def _vertical_units(layers: List[pogc.Layer]) -> List[str]: + """Retrieve the vertical units for the layers. + + Parameters + ---------- + layer : List[pogc.Layer] + The layers from which to get the vertical units. + + Returns + ------- + str | None + The vertical units available for the layers. + """ + vertical_units = set() + for layer in layers: + coordinates_list = layer.get_coordinates_list() + if len(coordinates_list) > 0 and coordinates_list[0].alt_units: + vertical_units.add(coordinates_list[0].alt_units) + + return list(vertical_units) diff --git a/ogc/edr/edr_provider.py b/ogc/edr/edr_provider.py index ed0c731..77ea213 100644 --- a/ogc/edr/edr_provider.py +++ b/ogc/edr/edr_provider.py @@ -2,7 +2,6 @@ import io import numpy as np import zipfile -import traitlets as tl from datetime import datetime from collections import defaultdict from typing import List, Dict, Tuple, Any @@ -34,20 +33,27 @@ def set_layers(cls, base_url: str, layers: List[pogc.Layer]): cls._layers_dict[base_url] = layers @classmethod - def get_layers(cls, base_url: str) -> List[pogc.Layer]: - """Get the layer resources for a specific base URL. + def get_layers(cls, base_url: str, group: str | None = None) -> List[pogc.Layer]: + """Get the layer resources for a specific base URL and group. Parameters ---------- base_url : str The base URL for the layers. + group : str | None, optional + Optional group to filter layers, by default None. + Returns ------- List[pogc.Layer] The layers associated with the base URL. 
""" - return cls._layers_dict.get(base_url, []) + layers = cls._layers_dict.get(base_url, []) + if group is not None: + return [layer for layer in layers if layer.group.lower() == group.lower()] + else: + return layers def __init__(self, provider_def: Dict[str, Any]): """Construct the provider using the provider definition. @@ -65,14 +71,15 @@ def __init__(self, provider_def: Dict[str, Any]): Raised if the provider does not specify any base URL. """ super().__init__(provider_def) - collection_id = provider_def.get("data", None) + collection_id = provider_def.get("data") if collection_id is None: raise ProviderConnectionError("Data not found.") self.collection_id = str(collection_id) + self.native_format = provider_def["format"]["name"] - self.base_url = provider_def.get("base_url", None) - if self.base_url is None: + self.base_url = provider_def.get("base_url", "") + if not self.base_url: raise ProviderConnectionError("Valid URL identifier not found for the data.") @property @@ -86,9 +93,7 @@ def parameters(self) -> Dict[str, pogc.Layer]: Dict[str, pogc.Layer] The parameters as a dictionary of layer identifiers and layer objects. """ - return { - layer.identifier: layer for layer in self.get_layers(self.base_url) if layer.group == self.collection_id - } + return {layer.identifier: layer for layer in self.get_layers(self.base_url, self.collection_id)} def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): """Handle the requests to the EDR server at the specified requested coordinates. 
@@ -136,38 +141,51 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): z_arg = kwargs.get("z") output_format = str(output_format).lower() - available_times = self.get_datetimes(list(self.parameters.values())) + self.check_query_condition( + not any(output_format == query_format.lower() for query_format in settings.EDR_QUERY_FORMATS), + "Invalid output format provided.", + ) + + available_times = self.get_datetimes(list(self.parameters.values()), instance) available_altitudes = self.get_altitudes(list(self.parameters.values())) - time_coords = self.interpret_time_coordinates(available_times, datetime_arg, requested_coordinates.crs) + time_coords = self.interpret_time_coordinates( + available_times, datetime_arg, instance, requested_coordinates.crs + ) altitude_coords = self.interpret_altitude_coordinates(available_altitudes, z_arg, requested_coordinates.crs) - # Allow parameters without case-sensitivity - parameters_lower = [param.lower() for param in requested_parameters or []] - parameters_filtered = { - key: value - for key, value in self.parameters.items() - if key.lower() in parameters_lower and value is not None - } - - self.check_query_condition(datetime_arg is not None and time_coords is None, "Invalid datetime provided.") - self.check_query_condition(z_arg is not None and altitude_coords is None, "Invalid altitude provided.") + # Allow parameters without case-sensitivity, default to using all parameters + parameters_filtered = self.parameters + if requested_parameters is not None and len(requested_parameters) > 0: + parameters_lower = [param.lower() for param in requested_parameters or []] + parameters_filtered = { + key: value + for key, value in self.parameters.items() + if key.lower() in parameters_lower and value is not None + } self.check_query_condition(len(parameters_filtered) == 0, "Invalid parameters provided.") self.check_query_condition( - instance is not None and not self.validate_datetime(instance), "Invalid instance 
time provided." + instance is not None and instance not in self.instances(), "Invalid instance provided." ) if time_coords is not None: + self.check_query_condition( + len(time_coords["time"].coordinates) > 1 and output_format == settings.GEOTIFF.lower(), + "GeoTIFF output currently only supports single time requests.", + ) requested_coordinates = podpac.coordinates.merge_dims([time_coords, requested_coordinates]) if altitude_coords is not None: + self.check_query_condition( + len(altitude_coords["alt"].coordinates) > 1 and output_format == settings.GEOTIFF.lower(), + "GeoTIFF output currently only supports single altitude requests.", + ) + self.check_query_condition(len(parameters_filtered) == 0, "Invalid parameters provided.") requested_coordinates = podpac.coordinates.merge_dims([altitude_coords, requested_coordinates]) # Handle defining native coordinates for the query, these should match between each layer - coordinates_list = next(iter(parameters_filtered.values())).node.find_coordinates() + coordinates_list = next(iter(parameters_filtered.values())).get_coordinates_list() self.check_query_condition(len(coordinates_list) == 0, "Native coordinates not found.") - requested_native_coordinates = self.get_native_coordinates( - requested_coordinates, coordinates_list[0], np.datetime64(instance) - ) + requested_native_coordinates = self.get_native_coordinates(requested_coordinates, coordinates_list[0]) self.check_query_condition( bool(requested_native_coordinates.size > settings.MAX_GRID_COORDS_REQUEST_SIZE), @@ -176,26 +194,18 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): dataset = {} for requested_parameter, layer in parameters_filtered.items(): - units_data_array = layer.node.eval(requested_native_coordinates) - # Recombine stacked temporal dimensions if necessary. - # The temporal output should always be stacked, based on stacked input. 
- if "time_forecastOffsetHr" in units_data_array.dims: - forecast_offsets = units_data_array.forecastOffsetHr.data.copy() - time_data = units_data_array.time.data.copy() - units_data_array = units_data_array.drop_vars({"time", "time_forecastOffsetHr", "forecastOffsetHr"}) - units_data_array = units_data_array.rename(time_forecastOffsetHr="time") - units_data_array = units_data_array.assign_coords(time=time_data + forecast_offsets) - dataset[requested_parameter] = units_data_array + units_data_array = EdrProvider.evaluate_layer(requested_native_coordinates, layer) + if units_data_array is not None: + dataset[requested_parameter] = units_data_array self.check_query_condition(len(dataset) == 0, "No matching parameters found.") - # Return a coverage json if specified, else return Base64 encoded native response - if output_format == "json" or output_format == "coveragejson": + if output_format == settings.COVERAGE_JSON.lower() or output_format == settings.JSON.lower(): crs = self.interpret_crs(requested_native_coordinates.crs if requested_native_coordinates else None) - layers = self.get_layers(self.base_url) + layers = self.get_layers(self.base_url, self.collection_id) return self.to_coverage_json(layers, dataset, crs) - else: - return self.to_geotiff_response(dataset, self.collection_id) + + return self.to_geotiff_response(dataset, self.collection_id) def position(self, **kwargs): """Handles requests for the position query type. @@ -261,15 +271,23 @@ def cube(self, **kwargs): crs = kwargs.get("crs") crs = EdrProvider.interpret_crs(crs) - if not isinstance(bbox, List) or len(bbox) != 4: - msg = "Invalid bounding box provided, expected bounding box of (minx, miny, maxx, maxy)." + if not isinstance(bbox, List) or (len(bbox) != 4 and len(bbox) != 6): + msg = ( + "Invalid bounding box provided, " + "expected bounding box of (minx, miny, maxx, maxy) or (minx, miny, minz, maxx, maxy, maxz)." 
+ ) raise ProviderInvalidQueryError(msg, user_msg=msg) - xmin, ymin, xmax, ymax = bbox - lon, lat = EdrProvider.crs_converter([xmin, xmax], [ymin, ymax], crs) + if len(bbox) == 6: + xmin, ymin, zmin, xmax, ymax, zmax = bbox + # Set the z argument if not specified using a closed interval from the bounding box data + if kwargs["z"] is None: + kwargs["z"] = f"{zmin}/{zmax}" + else: + xmin, ymin, xmax, ymax = bbox + lon, lat = EdrProvider.crs_converter([xmin, xmax], [ymin, ymax], crs) requested_coordinates = podpac.Coordinates([lat, lon], dims=["lat", "lon"], crs=crs) - return self.handle_query(requested_coordinates, **kwargs) def area(self, **kwargs): @@ -336,10 +354,9 @@ def instances(self, **kwargs) -> List[str]: The instances available in the collection. """ instances = set() - layers = self.get_layers(self.base_url) - for layer in layers: - if layer.group == self.collection_id: - instances.update(layer.time_instances()) + collection_layers = self.get_layers(self.base_url, self.collection_id) + for layer in collection_layers: + instances.update(layer.time_instances()) return list(instances) def get_fields(self) -> Dict[str, Any]: @@ -352,14 +369,52 @@ def get_fields(self) -> Dict[str, Any]: """ fields = {} for parameter_key, layer in self.parameters.items(): - units = layer.node.units if layer.node.units is not None else layer.node.style.units fields[parameter_key] = { "type": "number", "title": parameter_key, - "x-ogc-unit": units, + "description": f"{layer.title} ({layer.abstract})", + "x-ogc-unit": layer.get_units(), } return fields + @staticmethod + def evaluate_layer(requested_coordinates: podpac.Coordinates, layer: pogc.Layer) -> podpac.UnitsDataArray | None: + """Evaluate a layer using the requested coordinates. + + Temporal coordinates are ignored if the layer does not include them. + + Parameters + ---------- + requested_coordinates : podpac.Coordinates + The requested coordinates for the evaluation. + layer : pogc.Layer + The layer to evaluate. 
+ + Returns + ------- + podpac.UnitsDataArray + The units data array returned from evaluation or None if the node was not found. + """ + coordinates = layer.get_coordinates_list() + layer_requested_coordinates = requested_coordinates + units_data_array = None + if len(coordinates) > 0 and "time" not in coordinates[0].udims: + layer_requested_coordinates = layer_requested_coordinates.udrop( + ["time", "forecastOffsetHr"], ignore_missing=True + ) + if layer.node is not None: + units_data_array = layer.node.eval(layer_requested_coordinates) + if "forecastOffsetHr" in units_data_array.dims or "time_forecastOffsetHr" in units_data_array.dims: + if "time_forecastOffsetHr" not in units_data_array.dims: + units_data_array = units_data_array.stack(time_forecastOffsetHr=("time", "forecastOffsetHr")) + forecast_offsets = units_data_array.forecastOffsetHr.data.copy() + time_data = units_data_array.time.data.copy() + units_data_array = units_data_array.drop_vars({"time", "time_forecastOffsetHr", "forecastOffsetHr"}) + units_data_array = units_data_array.rename(time_forecastOffsetHr="time") + units_data_array = units_data_array.assign_coords(time=time_data + forecast_offsets) + + return units_data_array + @staticmethod def get_altitudes(layers: List[pogc.Layer]) -> List[float]: """The list of available altitudes for the provided layers. @@ -377,31 +432,43 @@ def get_altitudes(layers: List[pogc.Layer]) -> List[float]: available_altitudes = set() for layer in layers: - coordinates_list = layer.node.find_coordinates() + coordinates_list = layer.get_coordinates_list() if len(coordinates_list) > 0 and "alt" in coordinates_list[0].udims: available_altitudes.update(coordinates_list[0]["alt"].coordinates) return list(available_altitudes) @staticmethod - def get_datetimes(layers: List[pogc.Layer]) -> List[str]: + def get_datetimes(layers: List[pogc.Layer], instance_time: str | None) -> List[np.datetime64]: """The list of available times for the provided layers. 
Parameters ---------- layers : List[pogc.Layer] The list of layers to determine datetimes for. - + instance_time: str | None + The optional instance time which forecast datetimes are being requested for. Returns ------- - List[str] - Available time strings for the provider layers. + List[np.datetime64] + Available time values for the provider layers. """ available_times = set() for layer in layers: - if hasattr(layer, "valid_times") and layer.valid_times is not tl.Undefined and len(layer.valid_times) > 0: - available_times.update(layer.valid_times) + coordinates_list = layer.get_coordinates_list() + if len(coordinates_list) > 0 and "time" in coordinates_list[0].udims: + if instance_time in layer.time_instances() and "forecastOffsetHr" in coordinates_list[0].udims: + # Retrieve available forecastOffSetHr and instance time combinations + instance_datetime = np.datetime64(instance_time) + instance_coordinates = coordinates_list[0].select({"time": [instance_datetime, instance_datetime]}) + selected_offset_coordinates = instance_coordinates["forecastOffsetHr"].coordinates + available_times.update( + [np.datetime64(instance_time) + offset for offset in selected_offset_coordinates] + ) + elif not instance_time and "forecastOffsetHr" not in coordinates_list[0].udims: + # Retrieve layer times for non-instance requests + available_times.update(coordinates_list[0]["time"].coordinates) return list(available_times) @@ -454,7 +521,7 @@ def crs_converter(x: Any, y: Any, crs: str) -> Tuple[Any, Any]: Tuple[Any, Any] The X,Y as Longitude/Latitude data. """ - if crs.lower() == "epsg:4326": + if crs.lower() == settings.epsg_4326_uri_format.lower(): return (y, x) return (x, y) @@ -463,7 +530,7 @@ def crs_converter(x: Any, y: Any, crs: str) -> Tuple[Any, Any]: def interpret_altitude_coordinates( available_altitudes: List[float], altitude_string: str | None, crs: str | None ) -> podpac.Coordinates | None: - """Interpret the string into altitude coordinates using known formats. 
+ """Interpret the altitude string into altitude coordinates using known formats. Specification: single-level = level @@ -483,10 +550,18 @@ def interpret_altitude_coordinates( Returns ------- podpac.Coordinates | None - Altitude coordinates for the request or None if conversion fails. + Altitude coordinates for the request or None if no matching altitudes were found. + + Raises + ------ + ProviderInvalidQueryError + Raised if the provided altitude string is invalid. """ - if not altitude_string or len(available_altitudes) == 0: + + if len(available_altitudes) == 0: return None + if not altitude_string: + return podpac.Coordinates([available_altitudes], dims=["alt"], crs=crs) try: altitudes = None @@ -504,15 +579,16 @@ def interpret_altitude_coordinates( else: altitudes = [float(alt) for alt in altitude_string.split(",")] except ValueError: - return None + msg = "Invalid vertical level requested." + raise ProviderInvalidQueryError(msg, user_msg=msg) return podpac.Coordinates([altitudes], dims=["alt"], crs=crs) if altitudes is not None else None @staticmethod def interpret_time_coordinates( - available_times: List[str], time_string: str | None, crs: str | None + available_times: List[np.datetime64], time_string: str | None, instance_time: str | None, crs: str | None ) -> podpac.Coordinates | None: - """Interpret the string into a list of times using known formats. + """Interpret the time string and instance into time coordinates using known formats. Specification: interval-closed = date-time "/" date-time @@ -523,26 +599,35 @@ def interpret_time_coordinates( Parameters ---------- - available_times: List[str] + available_times: List[np.datetime64] The available times for interpretation. time_string : str | None The string representation of the requested times. + instance_time: str | None + The string representation of the requested instance time. crs : str The CRS that the coordinates need to match. 
Returns ------- podpac.Coordinates | None - Time coordinates for the request or None if conversion fails. + Time coordinates for the request or None if no matching times were found. + + Raises + ------ + ProviderInvalidQueryError + Raised if the provided time string is invalid. """ - if not time_string or len(available_times) == 0: + if len(available_times) == 0: return None try: times = None - np_available_times = [np.datetime64(time) for time in available_times] - if "/" in time_string: + np_available_times = np.array(available_times) + if not time_string: + times = np_available_times + elif "/" in time_string: times_split = time_string.split("/") if len(times_split) == 2: minimum = times_split[0] @@ -560,9 +645,20 @@ def interpret_time_coordinates( else: times = [np.datetime64(time_string)] except ValueError: - return None + msg = "Invalid datetime requested." + raise ProviderInvalidQueryError(msg, user_msg=msg) - return podpac.Coordinates([times], dims=["time"], crs=crs) if times is not None else None + if times is None: + msg = "Invalid datetime requested." 
+ raise ProviderInvalidQueryError(msg, user_msg=msg) + + if instance_time: + offsets = [np.timedelta64(time - np.datetime64(instance_time), "h") for time in times] + return podpac.Coordinates( + [[[instance_time] * len(offsets), offsets]], dims=[["time", "forecastOffsetHr"]], crs=crs + ) + + return podpac.Coordinates([times], dims=["time"], crs=crs) @staticmethod def to_coverage_json( @@ -652,28 +748,29 @@ def to_coverage_json( } for param, data_array in dataset.items(): - layer = next(layer for layer in layers if layer.identifier == param) - units = layer.node.units if layer.node.units is not None else layer.node.style.units - parameter_definition = { - param: { - "type": "Parameter", - "observedProperty": { - "id": param, - "label": layer.title, - "description": { - "en": layer.abstract, + layer = next((layer for layer in layers if layer.identifier == param), None) + if layer is not None: + units = layer.get_units() + parameter_definition = { + param: { + "type": "Parameter", + "observedProperty": { + "label": { + "id": param, + "en": param, + } }, - }, - "unit": { - "label": {"en": units}, - "symbol": { - "value": units, - "type": None, + "description": f"{layer.title} ({layer.abstract})", + "unit": { + "label": {"en": units}, + "symbol": { + "value": units, + "type": None, + }, }, - }, + } } - } - coverage_json["domain"]["parameters"].update(parameter_definition) + coverage_json["domain"]["parameters"].update(parameter_definition) coverage_json["domain"]["ranges"].update( { param: { @@ -765,10 +862,8 @@ def to_geotiff_response(dataset: Dict[str, podpac.UnitsDataArray], collection_id def get_native_coordinates( source_coordinates: podpac.Coordinates, target_coordinates: podpac.Coordinates, - source_time_instance: np.datetime64 | None, ) -> podpac.Coordinates: """Find the intersecting latitude and longitude coordinates between the source and target. - Convert time instances to stacked time and forecast offsets for node evalutation. 
Parameters ---------- @@ -776,8 +871,6 @@ def get_native_coordinates( The source coordinates to be converted. target_coordinates : podpac.Coordinates The target coordinates to find intersections on. - source_time_instance : np.datetime64 | None - The time instance of the source coordinates to convert to offsets. Returns ------- @@ -785,28 +878,11 @@ def get_native_coordinates( The converted coordinates source coordinates intersecting with the target coordinates. """ # Find intersections with target keeping source crs - source_intersection_coordinates = target_coordinates.intersect(source_coordinates, dims=["lat", "lon"]) + target_spatial_coordinates = podpac.Coordinates( + [target_coordinates["lat"], target_coordinates["lon"]], dims=["lat", "lon"] + ) + source_intersection_coordinates = target_spatial_coordinates.intersect(source_coordinates, dims=["lat", "lon"]) source_intersection_coordinates = source_intersection_coordinates.transform(source_coordinates.crs) - # Handle conversion from times and instance to time and offsets - if ( - "forecastOffsetHr" in target_coordinates.udims - and "time" in target_coordinates.udims - and "time" in source_coordinates.udims - and source_time_instance is not None - ): - time_deltas = [] - for time in source_coordinates["time"].coordinates: - offset = np.timedelta64(time - source_time_instance, "h") - time_deltas.append(offset) - - # This modifies the time coordinates to account for the new forecast offset hour - new_coordinates = podpac.Coordinates( - [[[source_time_instance] * len(time_deltas), time_deltas]], - [["time", "forecastOffsetHr"]], - crs=source_coordinates.crs, - ) - source_intersection_coordinates = podpac.coordinates.merge_dims( - [source_intersection_coordinates.udrop(["time", "forecastOffsetHr"]), new_coordinates] - ) - - return source_intersection_coordinates + return podpac.coordinates.merge_dims( + [source_intersection_coordinates, source_coordinates.udrop(["lat", "lon"], ignore_missing=True)] + ) diff --git 
a/ogc/edr/edr_routes.py b/ogc/edr/edr_routes.py index ffb6544..4217306 100644 --- a/ogc/edr/edr_routes.py +++ b/ogc/edr/edr_routes.py @@ -7,13 +7,13 @@ import pygeoapi.l10n import pygeoapi.plugin import pygeoapi.api -import pygeoapi.api.environmental_data_retrieval as pygeoedr from typing import Tuple, Any, Dict from http import HTTPStatus from copy import deepcopy from pygeoapi.openapi import get_oas from ogc import podpac as pogc +from .edr_api import EdrAPI from .edr_config import EdrConfig from .edr_provider import EdrProvider @@ -133,7 +133,7 @@ def landing_page(self, request: pygeoapi.api.APIRequest) -> Tuple[dict, int, str """ self.clean_configuration_cache() self.update_configuration_base_url(request) - return pygeoapi.api.landing_page(self.api, request) + return EdrAPI.landing_page(self.api, request) def openapi(self, request: pygeoapi.api.APIRequest) -> Tuple[dict, int, str | bytes]: """Handle API documentation requests for the server. @@ -150,7 +150,7 @@ def openapi(self, request: pygeoapi.api.APIRequest) -> Tuple[dict, int, str | by """ self.clean_configuration_cache() self.update_configuration_base_url(request) - return pygeoapi.api.openapi_(self.api, request) + return EdrAPI.openapi_(self.api, request) def conformance(self, request: pygeoapi.api.APIRequest) -> Tuple[dict, int, str | bytes]: """Handle conformance requests for the server. 
@@ -167,7 +167,7 @@ def conformance(self, request: pygeoapi.api.APIRequest) -> Tuple[dict, int, str """ self.clean_configuration_cache() self.update_configuration_base_url(request) - return pygeoapi.api.conformance(self.api, request) + return EdrAPI.conformance(self.api, request) def describe_collections( self, @@ -190,7 +190,7 @@ def describe_collections( """ self.clean_configuration_cache() self.update_configuration_base_url(request) - return pygeoapi.api.describe_collections(self.api, request, collection_id) + return EdrAPI.describe_collections(self.api, request, collection_id) def describe_instances( self, @@ -216,7 +216,7 @@ def describe_instances( """ self.clean_configuration_cache() self.update_configuration_base_url(request) - return pygeoedr.get_collection_edr_instances(self.api, request, collection_id, instance_id=instance_id) + return EdrAPI.get_collection_edr_instances(self.api, request, collection_id, instance_id=instance_id) def collection_query( self, @@ -245,7 +245,7 @@ def collection_query( """ self.clean_configuration_cache() self.update_configuration_base_url(request) - headers, http_status, content = pygeoedr.get_collection_edr_query( + headers, http_status, content = EdrAPI.get_collection_edr_query( self.api, request, collection_id, instance_id, query_type=query_type, location_id=None ) diff --git a/ogc/edr/test/test_edr_config.py b/ogc/edr/test/test_edr_config.py index 0a1a710..e651c49 100644 --- a/ogc/edr/test/test_edr_config.py +++ b/ogc/edr/test/test_edr_config.py @@ -23,7 +23,7 @@ def test_edr_configuration_contains_layer_groups(layers: List[pogc.Layer]): assert len(group_keys) > 0 for key in group_keys: - assert configuration["resources"].get(key, None) is not None + assert configuration["resources"].get(key) is not None def test_edr_configuration_contains_spatial_extent(layers: List[pogc.Layer], single_layer_cube_args: Dict[str, Any]): diff --git a/ogc/edr/test/test_edr_provider.py b/ogc/edr/test/test_edr_provider.py index 
3c635af..b01553d 100644 --- a/ogc/edr/test/test_edr_provider.py +++ b/ogc/edr/test/test_edr_provider.py @@ -461,7 +461,7 @@ def test_edr_provider_datetime_single_value(): available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] expected_times = [np.datetime64(available_times[0])] - time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None) + time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) assert time_coords is not None np.testing.assert_array_equal(time_coords["time"].coordinates, expected_times) @@ -470,10 +470,12 @@ def test_edr_provider_datetime_single_value(): def test_edr_provider_datetime_range_closed(): """Test the datetime interpreter method of the EDR Provider class with a closed datetime range.""" time_string = "2025-10-24/2025-10-26" - available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] - expected_times = [np.datetime64(time) for time in available_times[0:3]] + available_times = np.array( + ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"], dtype="datetime64" + ) + expected_times = [time for time in available_times[0:3]] - time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None) + time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) assert time_coords is not None np.testing.assert_array_equal(time_coords["time"].coordinates, expected_times) @@ -482,10 +484,12 @@ def test_edr_provider_datetime_range_closed(): def test_edr_provider_datetime_open_start(): """Test the datetime interpreter method of the EDR Provider class with a open datetime start.""" time_string = "../2025-10-27" - available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] - expected_times = [np.datetime64(time) for time in available_times[0:4]] + available_times = np.array( + ["2025-10-24", "2025-10-25", "2025-10-26", 
"2025-10-27", "2025-10-28"], dtype="datetime64" + ) + expected_times = [time for time in available_times[0:4]] - time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None) + time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) assert time_coords is not None np.testing.assert_array_equal(time_coords["time"].coordinates, expected_times) @@ -494,10 +498,12 @@ def test_edr_provider_datetime_open_start(): def test_edr_provider_datetime_open_end(): """Test the datetime interpreter method of the EDR Provider class with a open datetime end.""" time_string = "2025-10-25/.." - available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] - expected_times = [np.datetime64(time) for time in available_times[1:]] + available_times = np.array( + ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"], dtype="datetime64" + ) + expected_times = [time for time in available_times[1:]] - time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None) + time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) assert time_coords is not None np.testing.assert_array_equal(time_coords["time"].coordinates, expected_times) @@ -506,11 +512,12 @@ def test_edr_provider_datetime_open_end(): def test_edr_provider_datetime_invalid_string(): """Test the datetime interpreter method of the EDR Provider class with an invalid string.""" time_string = "2025-10-25/../../.." 
- available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] - - time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None) + available_times = np.array( + ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"], dtype="datetime64" + ) - assert time_coords is None + with pytest.raises(ProviderInvalidQueryError): + EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) def test_edr_provider_altitude_single_value(): @@ -566,9 +573,8 @@ def test_edr_provider_altitude_invalid_string(): altitude_string = "../20" available_altitudes = [0.0, 5.0, 10.0, 15.0, 20.0] - altitude_coords = EdrProvider.interpret_altitude_coordinates(available_altitudes, altitude_string, None) - - assert altitude_coords is None + with pytest.raises(ProviderInvalidQueryError): + EdrProvider.interpret_altitude_coordinates(available_altitudes, altitude_string, None) def test_edr_provider_crs_interpreter_default_value(): @@ -597,4 +603,4 @@ def test_edr_provider_crs_converter(): lon = y lat = x - assert EdrProvider.crs_converter(x, y, "epsg:4326") == (lon, lat) + assert EdrProvider.crs_converter(x, y, "http://www.opengis.net/def/crs/EPSG/0/4326") == (lon, lat) diff --git a/ogc/edr/test/test_edr_routes.py b/ogc/edr/test/test_edr_routes.py index caa0cbd..249eac4 100644 --- a/ogc/edr/test/test_edr_routes.py +++ b/ogc/edr/test/test_edr_routes.py @@ -213,7 +213,6 @@ def test_edr_routes_collection_query(layers: List[pogc.Layer], single_layer_cube collection_id = layers[0].group instance_id = next(iter(layers[0].time_instances())) parameter_name = single_layer_cube_args["parameter-name"][0] - single_layer_cube_args["f"] = "json" request = mock_request(single_layer_cube_args) edr_routes = EdrRoutes(layers=layers) @@ -314,7 +313,7 @@ def test_edr_routes_collection_query_invalid_bbox(layers: List[pogc.Layer], sing def test_edr_routes_collection_query_missing_parameter( layers: List[pogc.Layer], 
single_layer_cube_args: Dict[str, Any] ): - """Test the EDR colletion query with a missing parameter. + """Test the EDR colletion query with a missing parameter. All parameters are expected to be returned. Parameters ---------- @@ -328,14 +327,15 @@ def test_edr_routes_collection_query_missing_parameter( request = mock_request(single_layer_cube_args) edr_routes = EdrRoutes(layers=layers) - _, status, _ = edr_routes.collection_query( + _, status, content = edr_routes.collection_query( request, collection_id=layers[0].group, instance_id=next(iter(layers[0].time_instances())), query_type="cube", ) - assert status == HTTPStatus.BAD_REQUEST + assert status == HTTPStatus.OK + assert content["domain"]["ranges"].keys() == {layer.identifier for layer in layers} def test_edr_routes_request_url_updates_configuration_url(): diff --git a/ogc/podpac.py b/ogc/podpac.py index e64a41f..4ef1e3e 100755 --- a/ogc/podpac.py +++ b/ogc/podpac.py @@ -16,6 +16,7 @@ import json import textwrap import re +from datetime import datetime def _uppercase_for_dict_keys(lower_dict): @@ -64,7 +65,7 @@ def time_instances(self) -> List[str]: List of available time instances as a strings. """ time_instances = set() - coordinates_list = self.node.find_coordinates() + coordinates_list = self.get_coordinates_list() # Time instances are created if a node has both time and offsets. if ( @@ -72,10 +73,44 @@ def time_instances(self) -> List[str]: and "time" in coordinates_list[0].udims and "forecastOffsetHr" in coordinates_list[0].udims ): - time_instances.update([str(time) for time in coordinates_list[0]["time"].coordinates]) + time_instances.update( + [ + time.astype("datetime64[ms]").astype(datetime).isoformat() + for time in coordinates_list[0]["time"].coordinates + ] + ) return list(time_instances) + def get_coordinates_list(self) -> List[Coordinates]: + """Retrieve the coordinates list from the node. + + Returns + ------- + List[Coordinates] + List of coordinates from the node. 
+ """ + if self.node is None: + return [] + + return self.node.find_coordinates() + + def get_units(self) -> str | None: + """Retrieve the units from the node. + + Returns + ------- + str | None + The units from the node. + """ + units = None + if self.node is not None and self.node.units is not None: + units = self.node.units + elif self.node is not None and self.node.style is not None: + units = self.node.style.units + + return units + def get_node(self, args): return self.node From 96f102a98d9dac1859e3dbc65b4f8e68355917f9 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Fri, 30 Jan 2026 11:44:35 -0500 Subject: [PATCH 06/14] HTML support to prepare for future availability --- ogc/edr/edr_provider.py | 6 +++++- ogc/edr/edr_routes.py | 3 +++ ogc/settings.py | 3 ++- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/ogc/edr/edr_provider.py b/ogc/edr/edr_provider.py index 77ea213..cf5d2cb 100644 --- a/ogc/edr/edr_provider.py +++ b/ogc/edr/edr_provider.py @@ -200,7 +200,11 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): self.check_query_condition(len(dataset) == 0, "No matching parameters found.") - if output_format == settings.COVERAGE_JSON.lower() or output_format == settings.JSON.lower(): + if ( + output_format == settings.COVERAGE_JSON.lower() + or output_format == settings.JSON.lower() + or output_format == settings.HTML.lower() + ): crs = self.interpret_crs(requested_native_coordinates.crs if requested_native_coordinates else None) layers = self.get_layers(self.base_url, self.collection_id) return self.to_coverage_json(layers, dataset, crs) diff --git a/ogc/edr/edr_routes.py b/ogc/edr/edr_routes.py index 4217306..e883418 100644 --- a/ogc/edr/edr_routes.py +++ b/ogc/edr/edr_routes.py @@ -249,6 +249,9 @@ def collection_query( self.api, request, collection_id, instance_id, query_type=query_type, location_id=None ) + if "text/html" in headers.get("Content-Type", ""): + return headers, http_status, content + content = 
json.loads(content) if "fn" in content and "fp" in content: # Return the file name in the header and the content as only the binary data diff --git a/ogc/settings.py b/ogc/settings.py index 1cb5686..8b590f2 100755 --- a/ogc/settings.py +++ b/ogc/settings.py @@ -41,7 +41,8 @@ GEOTIFF = "GeoTIFF" JSON = "JSON" COVERAGE_JSON = "CoverageJSON" -EDR_QUERY_FORMATS = [GEOTIFF, COVERAGE_JSON, JSON] +HTML = "HTML" +EDR_QUERY_FORMATS = [GEOTIFF, COVERAGE_JSON, JSON, HTML] # WMS Capabilities timestamp format USE_TIMES_LIST = False From 9b14837f80b49a4c7758bb6bea0109171d175aac Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Mon, 2 Feb 2026 11:13:15 -0500 Subject: [PATCH 07/14] Add custom formatters for Geotiff and CoverageJSON --- ogc/edr/edr_config.py | 10 ++++++ ogc/edr/edr_formatter.py | 72 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+) create mode 100644 ogc/edr/edr_formatter.py diff --git a/ogc/edr/edr_config.py b/ogc/edr/edr_config.py index f7fa1fa..b5a025a 100644 --- a/ogc/edr/edr_config.py +++ b/ogc/edr/edr_config.py @@ -117,6 +117,16 @@ def _resources_definition(base_url: str, layers: List[pogc.Layer]) -> Dict[str, }, } ], + "formatters": [ + { + "name": "ogc.edr.edr_formatter.GeoTiffFormatter", + "mimetype": "image/tiff", + }, + { + "name": "ogc.edr.edr_formatter.CoverageJsonFormatter", + "mimetype": "application/prs.coverage+json", + }, + ], } } resources.update(resource) diff --git a/ogc/edr/edr_formatter.py b/ogc/edr/edr_formatter.py new file mode 100644 index 0000000..cf96c42 --- /dev/null +++ b/ogc/edr/edr_formatter.py @@ -0,0 +1,72 @@ +from pygeoapi.formatter.base import BaseFormatter +from pygeoapi.util import to_json + + +class GeoTiffFormatter(BaseFormatter): + """Formatter for GeoTIFF data. Defined for format link information to be populated.""" + + def __init__(self, formatter_def: dict): + """Initialize the formatter. + + Parameters + ---------- + formatter_def : dict + The formatter definition. 
+ """ + + super().__init__(formatter_def) + self.f = "geotiff" + self.mimetype = formatter_def["mimetype"] + self.extension = "tiff" + + def write(self, options: dict = {}, data: dict | None = None) -> str: + """Generate data in the specified format. + + Parameters + ---------- + options : dict, optional + Formatting options, by default {}. + data : dict | None, optional + Dictionary representation of the data, by default None. + + Returns + ------- + str + String representation of the data. + """ + return to_json(data, True) if data is not None else "" + + +class CoverageJsonFormatter(BaseFormatter): + """Formatter for CoverageJSON data. Defined for format link information to be populated.""" + + def __init__(self, formatter_def: dict): + """Initialize the formatter. + + Parameters + ---------- + formatter_def : dict + The formatter definition. + """ + + super().__init__(formatter_def) + self.f = "coveragejson" + self.mimetype = formatter_def["mimetype"] + self.extension = "json" + + def write(self, options: dict = {}, data: dict | None = None) -> str: + """Generate data in the specified format. + + Parameters + ---------- + options : dict, optional + Formatting options, by default {}. + data : dict | None, optional + Dictionary representation of the data, by default None. + + Returns + ------- + str + String representation of the data. 
+ """ + return to_json(data, True) if data is not None else "" From 37431b92771ae13531349737703fa6e1569b9b00 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Tue, 3 Feb 2026 10:12:19 -0500 Subject: [PATCH 08/14] Use abstract only for description --- ogc/edr/edr_provider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ogc/edr/edr_provider.py b/ogc/edr/edr_provider.py index cf5d2cb..9057d03 100644 --- a/ogc/edr/edr_provider.py +++ b/ogc/edr/edr_provider.py @@ -376,7 +376,7 @@ def get_fields(self) -> Dict[str, Any]: fields[parameter_key] = { "type": "number", "title": parameter_key, - "description": f"{layer.title} ({layer.abstract})", + "description": layer.abstract, "x-ogc-unit": layer.get_units(), } return fields @@ -764,7 +764,7 @@ def to_coverage_json( "en": param, } }, - "description": f"{layer.title} ({layer.abstract})", + "description": layer.abstract, "unit": { "label": {"en": units}, "symbol": { From ddc35f2e5c018f33467a5251cad6f7be48ddd88f Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Tue, 3 Feb 2026 10:13:16 -0500 Subject: [PATCH 09/14] CoverageJSON parameters match metadata parameters --- ogc/edr/edr_provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ogc/edr/edr_provider.py b/ogc/edr/edr_provider.py index 9057d03..95e21d2 100644 --- a/ogc/edr/edr_provider.py +++ b/ogc/edr/edr_provider.py @@ -766,7 +766,7 @@ def to_coverage_json( }, "description": layer.abstract, "unit": { - "label": {"en": units}, + "label": {"en": param}, "symbol": { "value": units, "type": None, From 4ee3d4930eaa412177bac123b01839451dde5792 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Tue, 3 Feb 2026 11:55:10 -0500 Subject: [PATCH 10/14] Resolve sonarqube cognitive complexity issues --- ogc/edr/edr_api.py | 28 +++----- ogc/edr/edr_provider.py | 140 ++++++++++++++++++++++++++++++++-------- 2 files changed, 122 insertions(+), 46 deletions(-) diff --git a/ogc/edr/edr_api.py b/ogc/edr/edr_api.py index 87bf92b..bcdf36b 100644 --- 
a/ogc/edr/edr_api.py +++ b/ogc/edr/edr_api.py @@ -98,11 +98,7 @@ def describe_collections(api: API, request: APIRequest, dataset: str | None = No collection_description = json.loads(content) collection_configuration = filter_dict_by_key_value(api.config["resources"], "type", "collection") - - if dataset is not None: - collections = [collection_description] - else: - collections = collection_description.get("collections", []) + collections = [collection_description] if dataset is not None else collection_description.get("collections", []) for collection in collections: collection_id = collection["id"] @@ -171,25 +167,17 @@ def get_collection_edr_instances( provider = get_provider_by_type(collection_configuration[dataset]["providers"], "edr") provider_plugin = load_plugin("provider", provider) provider_parameters = provider_plugin.get_fields() - base_url = provider["base_url"] - - if instance_id is not None: - instances = [instance_description] - else: - instances = instance_description.get("instances", []) - - collection_layers = EdrProvider.get_layers(base_url, dataset) + instances = [instance_description] if instance_id is not None else instance_description.get("instances", []) + collection_layers = EdrProvider.get_layers(provider["base_url"], dataset) for instance in instances: - collection_id = dataset - extents = instance.get("extent", {}) - if "vertical" in collection_configuration[collection_id]["extents"]: - extents = extents | {"vertical": collection_configuration[collection_id]["extents"]["vertical"]} + if "vertical" in collection_configuration[dataset]["extents"]: + extents = extents | {"vertical": collection_configuration[dataset]["extents"]["vertical"]} times = EdrProvider.get_datetimes(collection_layers, instance["id"]) if len(times) > 0: - trs = collection_configuration[collection_id]["extents"]["temporal"].get("trs") + trs = collection_configuration[dataset]["extents"]["temporal"].get("trs") temporal_extents = EdrAPI._temporal_extents(times, trs) 
extents = extents | temporal_extents @@ -201,9 +189,9 @@ def get_collection_edr_instances( extents = extents | spatial_extents instance["extent"] = extents - instance["output_formats"] = collection_configuration[collection_id].get("output_formats", []) + instance["output_formats"] = collection_configuration[dataset].get("output_formats", []) - height_units = collection_configuration[collection_id].get("height_units") + height_units = collection_configuration[dataset].get("height_units") for query_type in instance["data_queries"]: data_query_additions = { "query_type": query_type, diff --git a/ogc/edr/edr_provider.py b/ogc/edr/edr_provider.py index 95e21d2..682d546 100644 --- a/ogc/edr/edr_provider.py +++ b/ogc/edr/edr_provider.py @@ -121,18 +121,26 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): Raises ------ + ProviderInvalidQueryError + Raised if an invalid instance is provided. + ProviderInvalidQueryError + Raised if an invalid parameter is provided. + ProviderInvalidQueryError + Raised if an invalid output format is provided. ProviderInvalidQueryError Raised if a datetime string is provided but cannot be interpreted. ProviderInvalidQueryError Raised if an altitude string is provided but cannot be interpreted. ProviderInvalidQueryError - Raised if the parameters are invalid. + Raised if a GeoTIFF request includes multiple time bands. ProviderInvalidQueryError - Raised if an instance is provided and it is invalid. + Raised if a GeoTIFF request includes multiple vertical bands. ProviderInvalidQueryError Raised if native coordinates could not be found. ProviderInvalidQueryError Raised if the request queries for native coordinates exceeding the max allowable size. + ProviderInvalidQueryError + Raised if no parameters could not be evaluated. 
""" instance = kwargs.get("instance") requested_parameters = kwargs.get("select_properties") @@ -140,11 +148,9 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): datetime_arg = kwargs.get("datetime_") z_arg = kwargs.get("z") - output_format = str(output_format).lower() - self.check_query_condition( - not any(output_format == query_format.lower() for query_format in settings.EDR_QUERY_FORMATS), - "Invalid output format provided.", - ) + instance = self.validate_instance(instance) + requested_parameters = self.validate_parameters(requested_parameters) + output_format = self.validate_output_format(output_format) available_times = self.get_datetimes(list(self.parameters.values()), instance) available_altitudes = self.get_altitudes(list(self.parameters.values())) @@ -152,19 +158,6 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): available_times, datetime_arg, instance, requested_coordinates.crs ) altitude_coords = self.interpret_altitude_coordinates(available_altitudes, z_arg, requested_coordinates.crs) - # Allow parameters without case-sensitivity, default to using all parameters - parameters_filtered = self.parameters - if requested_parameters is not None and len(requested_parameters) > 0: - parameters_lower = [param.lower() for param in requested_parameters or []] - parameters_filtered = { - key: value - for key, value in self.parameters.items() - if key.lower() in parameters_lower and value is not None - } - self.check_query_condition(len(parameters_filtered) == 0, "Invalid parameters provided.") - self.check_query_condition( - instance is not None and instance not in self.instances(), "Invalid instance provided." 
- ) if time_coords is not None: self.check_query_condition( @@ -177,14 +170,12 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): len(altitude_coords["alt"].coordinates) > 1 and output_format == settings.GEOTIFF.lower(), "GeoTIFF output currently only supports single altitude requests.", ) - self.check_query_condition(len(parameters_filtered) == 0, "Invalid parameters provided.") + self.check_query_condition(len(requested_parameters) == 0, "Invalid parameters provided.") requested_coordinates = podpac.coordinates.merge_dims([altitude_coords, requested_coordinates]) # Handle defining native coordinates for the query, these should match between each layer - coordinates_list = next(iter(parameters_filtered.values())).get_coordinates_list() - + coordinates_list = next(iter(requested_parameters.values())).get_coordinates_list() self.check_query_condition(len(coordinates_list) == 0, "Native coordinates not found.") - requested_native_coordinates = self.get_native_coordinates(requested_coordinates, coordinates_list[0]) self.check_query_condition( @@ -193,7 +184,7 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): ) dataset = {} - for requested_parameter, layer in parameters_filtered.items(): + for requested_parameter, layer in requested_parameters.items(): units_data_array = EdrProvider.evaluate_layer(requested_native_coordinates, layer) if units_data_array is not None: dataset[requested_parameter] = units_data_array @@ -292,6 +283,7 @@ def cube(self, **kwargs): lon, lat = EdrProvider.crs_converter([xmin, xmax], [ymin, ymax], crs) requested_coordinates = podpac.Coordinates([lat, lon], dims=["lat", "lon"], crs=crs) + return self.handle_query(requested_coordinates, **kwargs) def area(self, **kwargs): @@ -381,6 +373,102 @@ def get_fields(self) -> Dict[str, Any]: } return fields + def validate_output_format(self, output_format: str | None) -> str: + """Validate the output format for a query. 
+ + If None provided, return the default. + If the provided output format is invalid, raise an error. + + Parameters + ---------- + output_format : str | None + The specified output format which needs to be validated. + + Returns + ------- + str + Output format string. + + Raises + ------ + ProviderInvalidQueryError + Raised if the provided output format is invalid. + """ + if output_format is None: + return self.native_format.lower() + + if output_format.lower() not in [key.lower() for key in settings.EDR_QUERY_FORMATS]: + msg = f"Invalid format provided, expected one of {', '.join(settings.EDR_QUERY_FORMATS)}" + raise ProviderInvalidQueryError(msg, user_msg=msg) + + return output_format.lower() + + def validate_instance(self, instance: str | None) -> str | None: + """Validate the instance for a query. + + If None provided, the collection is being queried. + If the instance is invalid, raise an error. + + Parameters + ---------- + instance : str | None + The instance which needs to be validated. + + Returns + ------- + str | None + The validated instance or None if the collection is being queried. + + Raises + ------ + ProviderInvalidQueryError + Raised if the provided instance is invalid. + """ + if instance is None: + return None + + if instance not in self.instances(): + msg = "Invalid instance provided." + raise ProviderInvalidQueryError(msg, user_msg=msg) + + return instance + + def validate_parameters(self, parameters: List[str] | None) -> Dict[str, pogc.Layer]: + """Validate the parameters for a query. + + If None provided or a list is empty, return all parameters. + If the provided parameter list is invalid, raise an error. + + Parameters + ---------- + parameters : List[str] | None + The specified parameters for a query. + + Returns + ------- + Dict[str, pogc.Layer] + The validated parameters dictionary containing associated layers. + + Raises + ------ + ProviderInvalidQueryError + Raised if the provided parameters are invalid. 
+ """ + if parameters is None or len(parameters) == 0: + return self.parameters + + parameters_lower = [param.lower() for param in parameters] + parameters_filtered = { + key: value + for key, value in self.parameters.items() + if key.lower() in parameters_lower and value is not None + } + if len(parameters_filtered) != len(parameters): + msg = "Invalid parameters provided." + raise ProviderInvalidQueryError(msg, user_msg=msg) + + return parameters_filtered + @staticmethod def evaluate_layer(requested_coordinates: podpac.Coordinates, layer: pogc.Layer) -> podpac.UnitsDataArray | None: """Evaluate a layer using the requested coordinates. @@ -485,7 +573,7 @@ def interpret_crs(crs: str | None) -> str: Parameters ---------- - crs : str + crs : str | None The input CRS id string which needs to be validated/converted. Returns From 6bc4887b9729babffd1a3d0325717ee90a2df44b Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Tue, 3 Feb 2026 19:27:37 +0000 Subject: [PATCH 11/14] Testing addition and altitude/time change --- ogc/edr/edr_provider.py | 48 ++++++++------------------- ogc/edr/test/test_edr_provider.py | 55 ++++++++++++++++++++++--------- 2 files changed, 53 insertions(+), 50 deletions(-) diff --git a/ogc/edr/edr_provider.py b/ogc/edr/edr_provider.py index 682d546..0e437ad 100644 --- a/ogc/edr/edr_provider.py +++ b/ogc/edr/edr_provider.py @@ -170,7 +170,6 @@ def handle_query(self, requested_coordinates: podpac.Coordinates, **kwargs): len(altitude_coords["alt"].coordinates) > 1 and output_format == settings.GEOTIFF.lower(), "GeoTIFF output currently only supports single altitude requests.", ) - self.check_query_condition(len(requested_parameters) == 0, "Invalid parameters provided.") requested_coordinates = podpac.coordinates.merge_dims([altitude_coords, requested_coordinates]) # Handle defining native coordinates for the query, these should match between each layer @@ -276,7 +275,7 @@ def cube(self, **kwargs): if len(bbox) == 6: xmin, ymin, zmin, xmax, ymax, zmax = 
bbox # Set the z argument if not specified using a closed interval from the bounding box data - if kwargs["z"] is None: + if kwargs.get("z") is None: kwargs["z"] = f"{zmin}/{zmax}" else: xmin, ymin, xmax, ymax = bbox @@ -650,14 +649,14 @@ def interpret_altitude_coordinates( Raised if the provided altitude string is invalid. """ - if len(available_altitudes) == 0: + if not altitude_string and len(available_altitudes) == 0: return None - if not altitude_string: - return podpac.Coordinates([available_altitudes], dims=["alt"], crs=crs) try: altitudes = None - if "/" in altitude_string: + if not altitude_string: + altitudes = available_altitudes + elif "/" in altitude_string: altitudes_split = altitude_string.split("/") if len(altitudes_split) == 2: minimum = float(altitudes_split[0]) @@ -674,6 +673,10 @@ def interpret_altitude_coordinates( msg = "Invalid vertical level requested." raise ProviderInvalidQueryError(msg, user_msg=msg) + if altitudes is None: + msg = "Invalid vertical level requested." + raise ProviderInvalidQueryError(msg, user_msg=msg) + return podpac.Coordinates([altitudes], dims=["alt"], crs=crs) if altitudes is not None else None @staticmethod @@ -711,28 +714,25 @@ def interpret_time_coordinates( Raised if the provided time string is invalid. 
""" - if len(available_times) == 0: + if not time_string and len(available_times) == 0: return None try: times = None - np_available_times = np.array(available_times) if not time_string: - times = np_available_times + times = available_times elif "/" in time_string: times_split = time_string.split("/") if len(times_split) == 2: minimum = times_split[0] maximum = times_split[1] if minimum == "..": - times = [time for time in np_available_times if time <= np.datetime64(maximum)] + times = [time for time in available_times if time <= np.datetime64(maximum)] elif maximum == "..": - times = [time for time in np_available_times if time >= np.datetime64(minimum)] + times = [time for time in available_times if time >= np.datetime64(minimum)] else: times = [ - time - for time in np_available_times - if np.datetime64(minimum) <= time <= np.datetime64(maximum) + time for time in available_times if np.datetime64(minimum) <= time <= np.datetime64(maximum) ] else: times = [np.datetime64(time_string)] @@ -896,26 +896,6 @@ def check_query_condition(conditional: bool, message: str): if conditional: raise ProviderInvalidQueryError(message, user_msg=message) - @staticmethod - def validate_datetime(datetime_string: str) -> bool: - """Validate whether a string can be converted to a numpy datetime. - - Parameters - ---------- - date_string : str - The datetime string to be validated. - - Returns - ------- - bool - Whether the datetime string can be converted to a numpy datetime. - """ - try: - np.datetime64(datetime_string) - return True - except ValueError: - return False - @staticmethod def to_geotiff_response(dataset: Dict[str, podpac.UnitsDataArray], collection_id: str) -> Dict[str, Any]: """Generate a geotiff of the data for the provided parameters. 
diff --git a/ogc/edr/test/test_edr_provider.py b/ogc/edr/test/test_edr_provider.py index b01553d..51caa94 100644 --- a/ogc/edr/test/test_edr_provider.py +++ b/ogc/edr/test/test_edr_provider.py @@ -230,6 +230,8 @@ def test_edr_provider_position_request_invalid_property( """ base_url = "/" args = single_layer_cube_args_internal + del args["bbox"] + args["wkt"] = Point(5.2, 52.1) args["select_properties"] = "invalid" provider = EdrProvider(provider_def=get_provider_definition(base_url)) @@ -291,6 +293,30 @@ def test_edr_provider_cube_request_invalid_bbox( provider.cube(**args) +def test_edr_provider_cube_request_invalid_instance( + layers: List[pogc.Layer], single_layer_cube_args_internal: Dict[str, Any] +): + """Test the cube method of the EDR Provider class with an invalid instance. + + Parameters + ---------- + layers : List[pogc.Layer] + Layers provided by a test fixture. + + single_layer_cube_args_internal : Dict[str, Any] + Single layer arguments with internal pygeoapi keys provided by a test fixture. 
+ """ + base_url = "/" + args = single_layer_cube_args_internal + args["instance"] = "invalid" + + provider = EdrProvider(provider_def=get_provider_definition(base_url)) + provider.set_layers(base_url, layers) + + with pytest.raises(ProviderInvalidQueryError): + provider.cube(**args) + + def test_edr_provider_cube_request_invalid_altitude( layers: List[pogc.Layer], single_layer_cube_args_internal: Dict[str, Any] ): @@ -459,7 +485,8 @@ def test_edr_provider_datetime_single_value(): """Test the datetime interpreter method of the EDR Provider class with a single datetime value.""" time_string = "2025-10-24" available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] - expected_times = [np.datetime64(available_times[0])] + available_times = [np.datetime64(time) for time in available_times] + expected_times = available_times[0] time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) @@ -470,10 +497,9 @@ def test_edr_provider_datetime_single_value(): def test_edr_provider_datetime_range_closed(): """Test the datetime interpreter method of the EDR Provider class with a closed datetime range.""" time_string = "2025-10-24/2025-10-26" - available_times = np.array( - ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"], dtype="datetime64" - ) - expected_times = [time for time in available_times[0:3]] + available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] + available_times = [np.datetime64(time) for time in available_times] + expected_times = available_times[0:3] time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) @@ -484,10 +510,9 @@ def test_edr_provider_datetime_range_closed(): def test_edr_provider_datetime_open_start(): """Test the datetime interpreter method of the EDR Provider class with a open datetime start.""" time_string = "../2025-10-27" - available_times = np.array( - ["2025-10-24", "2025-10-25", 
"2025-10-26", "2025-10-27", "2025-10-28"], dtype="datetime64" - ) - expected_times = [time for time in available_times[0:4]] + available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] + available_times = [np.datetime64(time) for time in available_times] + expected_times = available_times[0:4] time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) @@ -498,10 +523,9 @@ def test_edr_provider_datetime_open_start(): def test_edr_provider_datetime_open_end(): """Test the datetime interpreter method of the EDR Provider class with a open datetime end.""" time_string = "2025-10-25/.." - available_times = np.array( - ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"], dtype="datetime64" - ) - expected_times = [time for time in available_times[1:]] + available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] + available_times = [np.datetime64(time) for time in available_times] + expected_times = available_times[1:] time_coords = EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) @@ -512,9 +536,8 @@ def test_edr_provider_datetime_open_end(): def test_edr_provider_datetime_invalid_string(): """Test the datetime interpreter method of the EDR Provider class with an invalid string.""" time_string = "2025-10-25/../../.." 
- available_times = np.array( - ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"], dtype="datetime64" - ) + available_times = ["2025-10-24", "2025-10-25", "2025-10-26", "2025-10-27", "2025-10-28"] + available_times = [np.datetime64(time) for time in available_times] with pytest.raises(ProviderInvalidQueryError): EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) From e3ef50387deacdc7ecacac1827b301518bd67584 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Fri, 6 Feb 2026 13:34:35 +0000 Subject: [PATCH 12/14] Update formatters to use a common base class --- ogc/edr/edr_formatter.py | 39 ++++++++++++++++++--------------------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/ogc/edr/edr_formatter.py b/ogc/edr/edr_formatter.py index cf96c42..e3f3aab 100644 --- a/ogc/edr/edr_formatter.py +++ b/ogc/edr/edr_formatter.py @@ -2,8 +2,8 @@ from pygeoapi.util import to_json -class GeoTiffFormatter(BaseFormatter): - """Formatter for GeoTIFF data. Defined for format link information to be populated.""" +class BaseEdrFormatter(BaseFormatter): + """Base formatter for EDR data.""" def __init__(self, formatter_def: dict): """Initialize the formatter. @@ -15,9 +15,7 @@ def __init__(self, formatter_def: dict): """ super().__init__(formatter_def) - self.f = "geotiff" self.mimetype = formatter_def["mimetype"] - self.extension = "tiff" def write(self, options: dict = {}, data: dict | None = None) -> str: """Generate data in the specified format. @@ -37,8 +35,8 @@ def write(self, options: dict = {}, data: dict | None = None) -> str: return to_json(data, True) if data is not None else "" -class CoverageJsonFormatter(BaseFormatter): - """Formatter for CoverageJSON data. Defined for format link information to be populated.""" +class GeoTiffFormatter(BaseEdrFormatter): + """Formatter for GeoTIFF data. Defined for format link information to be populated.""" def __init__(self, formatter_def: dict): """Initialize the formatter. 
@@ -50,23 +48,22 @@ def __init__(self, formatter_def: dict): """ super().__init__(formatter_def) - self.f = "coveragejson" - self.mimetype = formatter_def["mimetype"] - self.extension = "json" + self.f = "geotiff" + self.extension = "tiff" - def write(self, options: dict = {}, data: dict | None = None) -> str: - """Generate data in the specified format. + +class CoverageJsonFormatter(BaseEdrFormatter): + """Formatter for CoverageJSON data. Defined for format link information to be populated.""" + + def __init__(self, formatter_def: dict): + """Initialize the formatter. Parameters ---------- - options : dict, optional - Formatting options, by default {}. - data : dict | None, optional - Dictionary representation of the data, by default None. - - Returns - ------- - str - String representation of the data. + formatter_def : dict + The formatter definition. """ - return to_json(data, True) if data is not None else "" + + super().__init__(formatter_def) + self.f = "coveragejson" + self.extension = "json" From 55d42b6af1d1e663b240d6dc830838732a5cad10 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Fri, 6 Feb 2026 15:02:30 +0000 Subject: [PATCH 13/14] Add coverage for get altitudes method --- ogc/edr/test/test_edr_provider.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/ogc/edr/test/test_edr_provider.py b/ogc/edr/test/test_edr_provider.py index 51caa94..aab3db4 100644 --- a/ogc/edr/test/test_edr_provider.py +++ b/ogc/edr/test/test_edr_provider.py @@ -3,6 +3,7 @@ import zipfile import base64 import io +import podpac from shapely import Point, Polygon from typing import Dict, List, Any from ogc import podpac as pogc @@ -543,6 +544,19 @@ def test_edr_provider_datetime_invalid_string(): EdrProvider.interpret_time_coordinates(available_times, time_string, None, None) +def test_edr_provider_get_altitudes(): + """Test the get altitudes method of the EDR Provider class with a layer containing altitude data.""" + latitude = np.arange(1, 5) + longitude = 
np.arange(1, 5) + altitude = np.arange(1, 10) + data = np.random.default_rng(1).random((len(latitude), len(longitude), len(altitude))) + coords = podpac.Coordinates([latitude, longitude, altitude], dims=["lat", "lon", "alt"]) + node = podpac.data.Array(source=data, coordinates=coords) + layer = pogc.Layer(node=node, identifier="Test") + + np.testing.assert_array_equal(EdrProvider.get_altitudes([layer]), altitude) + + def test_edr_provider_altitude_single_value(): """Test the altitude interpreter method of the EDR Provider class with a single datetime value.""" altitude_string = "10" From 0f878ca83959d3a975c6e78b5b014526cf424916 Mon Sep 17 00:00:00 2001 From: Sam Cranford Date: Fri, 6 Feb 2026 15:04:13 +0000 Subject: [PATCH 14/14] Additional CRS addition for altitude --- ogc/edr/edr_config.py | 5 +---- ogc/edr/edr_provider.py | 2 +- ogc/edr/test/test_edr_provider.py | 10 +++++----- ogc/settings.py | 2 ++ 4 files changed, 9 insertions(+), 10 deletions(-) diff --git a/ogc/edr/edr_config.py b/ogc/edr/edr_config.py index b5a025a..4aa2a0b 100644 --- a/ogc/edr/edr_config.py +++ b/ogc/edr/edr_config.py @@ -107,10 +107,7 @@ def _resources_definition(base_url: str, layers: List[pogc.Layer]) -> Dict[str, "name": "ogc.edr.edr_provider.EdrProvider", "data": group_name, "base_url": base_url, - "crs": [ - settings.crs_84_uri_format, - settings.epsg_4326_uri_format, - ], + "crs": list(settings.EDR_CRS.keys()), "format": { "name": settings.GEOTIFF, "mimetype": "image/tiff", diff --git a/ogc/edr/edr_provider.py b/ogc/edr/edr_provider.py index 0e437ad..ada92fe 100644 --- a/ogc/edr/edr_provider.py +++ b/ogc/edr/edr_provider.py @@ -951,7 +951,7 @@ def get_native_coordinates( """ # Find intersections with target keeping source crs target_spatial_coordinates = podpac.Coordinates( - [target_coordinates["lat"], target_coordinates["lon"]], dims=["lat", "lon"] + [target_coordinates["lat"], target_coordinates["lon"]], dims=["lat", "lon"], crs=target_coordinates.crs ) 
source_intersection_coordinates = target_spatial_coordinates.intersect(source_coordinates, dims=["lat", "lon"]) source_intersection_coordinates = source_intersection_coordinates.transform(source_coordinates.crs) diff --git a/ogc/edr/test/test_edr_provider.py b/ogc/edr/test/test_edr_provider.py index aab3db4..0ee0dfa 100644 --- a/ogc/edr/test/test_edr_provider.py +++ b/ogc/edr/test/test_edr_provider.py @@ -6,6 +6,7 @@ import podpac from shapely import Point, Polygon from typing import Dict, List, Any +from ogc import settings from ogc import podpac as pogc from ogc.edr.edr_provider import EdrProvider from pygeoapi.provider.base import ProviderInvalidQueryError @@ -30,7 +31,7 @@ def get_provider_definition(base_url: str) -> Dict[str, Any]: "name": "ogc.edr.edr_provider.EdrProvider", "data": "Layers", "base_url": base_url, - "crs": ["http://www.opengis.net/def/crs/OGC/1.3/CRS84", "http://www.opengis.net/def/crs/EPSG/0/4326"], + "crs": list(settings.EDR_CRS.keys()), "format": {"name": "GeoJSON", "mimetype": "application/json"}, } @@ -616,13 +617,12 @@ def test_edr_provider_altitude_invalid_string(): def test_edr_provider_crs_interpreter_default_value(): """Test the CRS interpretation returns a default value when the argument is None.""" - assert EdrProvider.interpret_crs(None) == "http://www.opengis.net/def/crs/OGC/1.3/CRS84" + assert EdrProvider.interpret_crs(None) == settings.crs_84_uri_format def test_edr_provider_crs_interpreter_valid_value(): """Test the CRS interpretation returns a valid value when the argument is acceptable.""" - crs = "http://www.opengis.net/def/crs/EPSG/0/4326" - assert EdrProvider.interpret_crs(crs) == crs + assert EdrProvider.interpret_crs(settings.epsg_4326_uri_format) == settings.epsg_4326_uri_format def test_edr_provider_crs_interpreter_invalid_value(): @@ -640,4 +640,4 @@ def test_edr_provider_crs_converter(): lon = y lat = x - assert EdrProvider.crs_converter(x, y, "http://www.opengis.net/def/crs/EPSG/0/4326") == (lon, lat) + assert 
EdrProvider.crs_converter(x, y, crs=settings.epsg_4326_uri_format) == (lon, lat) diff --git a/ogc/settings.py b/ogc/settings.py index 8b590f2..8bdf166 100755 --- a/ogc/settings.py +++ b/ogc/settings.py @@ -12,6 +12,7 @@ crs_84 = "crs:84" epsg_4326 = "epsg:4326" crs_84_uri_format = "http://www.opengis.net/def/crs/OGC/1.3/CRS84" +crs_84h_uri_format = "http://www.opengis.net/def/crs/OGC/0/CRS84h" epsg_4326_uri_format = "http://www.opengis.net/def/crs/EPSG/0/4326" # Default/Supported WMS CRS/SRS @@ -35,6 +36,7 @@ EDR_CRS = { epsg_4326_uri_format: {"minx": -90.0, "miny": -180.0, "maxx": 90.0, "maxy": 180.0}, crs_84_uri_format: {"minx": -180.0, "miny": -90.0, "maxx": 180.0, "maxy": 90.0}, + crs_84h_uri_format: {"minx": -180.0, "miny": -90.0, "maxx": 180.0, "maxy": 90.0}, } # EDR query output formats