From 494f323ffbb6b5128fb6d76aeeb88fc4ef64e2eb Mon Sep 17 00:00:00 2001 From: Jeremi Joslin Date: Thu, 5 Mar 2026 14:17:42 +0700 Subject: [PATCH 1/6] feat(spp_api_v2_gis): add OGC API Features GIS endpoints --- spp_api_v2_gis/README.rst | 127 +++ spp_api_v2_gis/__init__.py | 5 + spp_api_v2_gis/__manifest__.py | 66 ++ .../data/qml_templates/graduated_polygon.qml | 16 + .../data/qml_templates/point_basic.qml | 34 + .../data/qml_templates/point_cluster.qml | 55 ++ spp_api_v2_gis/models/__init__.py | 4 + spp_api_v2_gis/models/api_client_scope.py | 18 + spp_api_v2_gis/models/fastapi_endpoint.py | 39 + spp_api_v2_gis/models/geofence.py | 281 +++++++ spp_api_v2_gis/pyproject.toml | 3 + spp_api_v2_gis/readme/DESCRIPTION.md | 76 ++ spp_api_v2_gis/routers/__init__.py | 7 + spp_api_v2_gis/routers/export.py | 131 +++ spp_api_v2_gis/routers/geofence.py | 307 +++++++ spp_api_v2_gis/routers/ogc_features.py | 388 +++++++++ spp_api_v2_gis/routers/proximity.py | 81 ++ spp_api_v2_gis/routers/spatial_query.py | 137 ++++ spp_api_v2_gis/routers/statistics.py | 91 +++ spp_api_v2_gis/schemas/__init__.py | 5 + spp_api_v2_gis/schemas/geofence.py | 45 ++ spp_api_v2_gis/schemas/geojson.py | 35 + spp_api_v2_gis/schemas/ogc.py | 83 ++ spp_api_v2_gis/schemas/query.py | 195 +++++ spp_api_v2_gis/schemas/statistics.py | 30 + spp_api_v2_gis/security/ir.model.access.csv | 5 + spp_api_v2_gis/services/__init__.py | 16 + spp_api_v2_gis/services/catalog_service.py | 167 ++++ spp_api_v2_gis/services/export_service.py | 320 ++++++++ spp_api_v2_gis/services/layers_service.py | 765 ++++++++++++++++++ spp_api_v2_gis/services/ogc_service.py | 515 ++++++++++++ .../services/qml_template_service.py | 394 +++++++++ .../services/spatial_query_service.py | 748 +++++++++++++++++ spp_api_v2_gis/static/description/index.html | 472 +++++++++++ spp_api_v2_gis/tests/__init__.py | 12 + spp_api_v2_gis/tests/test_batch_query.py | 351 ++++++++ spp_api_v2_gis/tests/test_catalog_service.py | 436 ++++++++++ 
spp_api_v2_gis/tests/test_export_service.py | 471 +++++++++++ spp_api_v2_gis/tests/test_geofence_model.py | 447 ++++++++++ spp_api_v2_gis/tests/test_layers_service.py | 729 +++++++++++++++++ spp_api_v2_gis/tests/test_ogc_features.py | 619 ++++++++++++++ spp_api_v2_gis/tests/test_ogc_http.py | 385 +++++++++ spp_api_v2_gis/tests/test_proximity_query.py | 352 ++++++++ .../tests/test_qml_template_service.py | 414 ++++++++++ .../tests/test_spatial_query_service.py | 523 ++++++++++++ .../tests/test_statistics_endpoint.py | 301 +++++++ 46 files changed, 10701 insertions(+) create mode 100644 spp_api_v2_gis/README.rst create mode 100644 spp_api_v2_gis/__init__.py create mode 100644 spp_api_v2_gis/__manifest__.py create mode 100644 spp_api_v2_gis/data/qml_templates/graduated_polygon.qml create mode 100644 spp_api_v2_gis/data/qml_templates/point_basic.qml create mode 100644 spp_api_v2_gis/data/qml_templates/point_cluster.qml create mode 100644 spp_api_v2_gis/models/__init__.py create mode 100644 spp_api_v2_gis/models/api_client_scope.py create mode 100644 spp_api_v2_gis/models/fastapi_endpoint.py create mode 100644 spp_api_v2_gis/models/geofence.py create mode 100644 spp_api_v2_gis/pyproject.toml create mode 100644 spp_api_v2_gis/readme/DESCRIPTION.md create mode 100644 spp_api_v2_gis/routers/__init__.py create mode 100644 spp_api_v2_gis/routers/export.py create mode 100644 spp_api_v2_gis/routers/geofence.py create mode 100644 spp_api_v2_gis/routers/ogc_features.py create mode 100644 spp_api_v2_gis/routers/proximity.py create mode 100644 spp_api_v2_gis/routers/spatial_query.py create mode 100644 spp_api_v2_gis/routers/statistics.py create mode 100644 spp_api_v2_gis/schemas/__init__.py create mode 100644 spp_api_v2_gis/schemas/geofence.py create mode 100644 spp_api_v2_gis/schemas/geojson.py create mode 100644 spp_api_v2_gis/schemas/ogc.py create mode 100644 spp_api_v2_gis/schemas/query.py create mode 100644 spp_api_v2_gis/schemas/statistics.py create mode 100644 
spp_api_v2_gis/security/ir.model.access.csv create mode 100644 spp_api_v2_gis/services/__init__.py create mode 100644 spp_api_v2_gis/services/catalog_service.py create mode 100644 spp_api_v2_gis/services/export_service.py create mode 100644 spp_api_v2_gis/services/layers_service.py create mode 100644 spp_api_v2_gis/services/ogc_service.py create mode 100644 spp_api_v2_gis/services/qml_template_service.py create mode 100644 spp_api_v2_gis/services/spatial_query_service.py create mode 100644 spp_api_v2_gis/static/description/index.html create mode 100644 spp_api_v2_gis/tests/__init__.py create mode 100644 spp_api_v2_gis/tests/test_batch_query.py create mode 100644 spp_api_v2_gis/tests/test_catalog_service.py create mode 100644 spp_api_v2_gis/tests/test_export_service.py create mode 100644 spp_api_v2_gis/tests/test_geofence_model.py create mode 100644 spp_api_v2_gis/tests/test_layers_service.py create mode 100644 spp_api_v2_gis/tests/test_ogc_features.py create mode 100644 spp_api_v2_gis/tests/test_ogc_http.py create mode 100644 spp_api_v2_gis/tests/test_proximity_query.py create mode 100644 spp_api_v2_gis/tests/test_qml_template_service.py create mode 100644 spp_api_v2_gis/tests/test_spatial_query_service.py create mode 100644 spp_api_v2_gis/tests/test_statistics_endpoint.py diff --git a/spp_api_v2_gis/README.rst b/spp_api_v2_gis/README.rst new file mode 100644 index 00000000..c7e1a7d1 --- /dev/null +++ b/spp_api_v2_gis/README.rst @@ -0,0 +1,127 @@ +.. image:: https://odoo-community.org/readme-banner-image + :target: https://odoo-community.org/get-involved?utm_source=readme + :alt: Odoo Community Association + +=============== +OpenSPP GIS API +=============== + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! 
source digest: sha256:b0c4c426ac92738100187cd709693fbf8d1a849f2a7a31b69b9874a834376f1c + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png + :target: https://odoo-community.org/page/development-status + :alt: Alpha +.. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2Fopenspp--modules-lightgray.png?logo=github + :target: https://github.com/OpenSPP/openspp-modules/tree/19.0/spp_api_v2_gis + :alt: OpenSPP/openspp-modules + +|badge1| |badge2| |badge3| + +REST API for QGIS plugin integration, providing GeoJSON endpoints, +spatial queries, and geofence management. + +Key Features +------------ + +- **Layer Catalog**: List available GIS layers and reports +- **GeoJSON Export**: Get pre-aggregated layer data for QGIS +- **QML Styling**: Fetch QGIS style files for consistent visualization +- **Spatial Queries**: Query registrant statistics within arbitrary + polygons using PostGIS +- **Geofence Management**: Save and manage areas of interest + +Architecture +------------ + +Follows thin client architecture where QGIS displays data and OpenSPP +performs all computation: + +- All spatial queries executed in PostGIS for performance +- Pre-aggregated data returned to minimize data transfer +- Configuration-driven styling using QML templates +- OAuth 2.0 authentication with scope-based access control + +API Endpoints +------------- + +- ``GET /gis/catalog`` - List available layers and reports +- ``GET /gis/layers/{id}`` - Get layer as GeoJSON FeatureCollection +- ``GET /gis/layers/{id}/qml`` - Get QGIS style file +- ``POST /gis/query/statistics`` - Query statistics for polygon +- ``POST /gis/geofences`` - Create geofence +- ``GET /gis/geofences`` - List geofences +- ``GET /gis/geofences/{id}`` - Get single geofence as GeoJSON +- 
``DELETE /gis/geofences/{id}`` - Archive geofence +- ``GET /gis/export/geopackage`` - Export layers for offline use + +Required Scopes +--------------- + +- ``gis:read`` - View layers and statistics +- ``gis:geofence`` - Create and manage geofences + +Dependencies +------------ + +- ``spp_api_v2`` - FastAPI infrastructure +- ``spp_gis`` - PostGIS integration +- ``spp_gis_report`` - Report configuration +- ``spp_area`` - Administrative area data + +.. IMPORTANT:: + This is an alpha version, the data model and design can change at any time without warning. + Only for development or testing purpose, do not use in production. + `More details on development status `_ + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* OpenSPP.org + +Maintainers +----------- + +.. |maintainer-jeremi| image:: https://github.com/jeremi.png?size=40px + :target: https://github.com/jeremi + :alt: jeremi +.. |maintainer-gonzalesedwin1123| image:: https://github.com/gonzalesedwin1123.png?size=40px + :target: https://github.com/gonzalesedwin1123 + :alt: gonzalesedwin1123 +.. |maintainer-reichie020212| image:: https://github.com/reichie020212.png?size=40px + :target: https://github.com/reichie020212 + :alt: reichie020212 + +Current maintainers: + +|maintainer-jeremi| |maintainer-gonzalesedwin1123| |maintainer-reichie020212| + +This module is part of the `OpenSPP/openspp-modules `_ project on GitHub. + +You are welcome to contribute. 
diff --git a/spp_api_v2_gis/__init__.py b/spp_api_v2_gis/__init__.py new file mode 100644 index 00000000..e997213a --- /dev/null +++ b/spp_api_v2_gis/__init__.py @@ -0,0 +1,5 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +from . import models +from . import routers +from . import schemas +from . import services diff --git a/spp_api_v2_gis/__manifest__.py b/spp_api_v2_gis/__manifest__.py new file mode 100644 index 00000000..4328fa08 --- /dev/null +++ b/spp_api_v2_gis/__manifest__.py @@ -0,0 +1,66 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +{ + "name": "OpenSPP GIS API", + "category": "OpenSPP/Integration", + "version": "19.0.2.0.0", + "sequence": 1, + "author": "OpenSPP.org", + "website": "https://github.com/OpenSPP/OpenSPP2", + "license": "LGPL-3", + "development_status": "Production/Stable", + "maintainers": ["jeremi", "gonzalesedwin1123", "reichie020212"], + "depends": [ + "spp_api_v2", + "spp_gis", + "spp_gis_report", + "spp_area", + "spp_hazard", + "spp_vocabulary", + "spp_statistic", + "spp_aggregation", + ], + "data": [ + "security/ir.model.access.csv", + ], + "assets": {}, + "demo": [], + "images": [], + "application": False, + "installable": True, + "auto_install": False, + "summary": """ + OGC API - Features compliant GIS endpoints for QGIS and GovStack GIS BB. + """, + "description": """ +OpenSPP GIS API +=============== + +Extends OpenSPP API V2 with OGC API - Features compliant endpoints, +enabling GovStack GIS Building Block compliance and interoperability +with any OGC client (QGIS, ArcGIS, Leaflet, ogr2ogr, etc.). 
+ +OGC API - Features Endpoints +----------------------------- +- ``GET /gis/ogc/`` - Landing page +- ``GET /gis/ogc/conformance`` - Conformance declaration +- ``GET /gis/ogc/collections`` - List feature collections +- ``GET /gis/ogc/collections/{id}`` - Collection metadata +- ``GET /gis/ogc/collections/{id}/items`` - Feature items (GeoJSON) +- ``GET /gis/ogc/collections/{id}/items/{fid}`` - Single feature +- ``GET /gis/ogc/collections/{id}/qml`` - QGIS style file (extension) + +Proprietary Endpoints +--------------------- +- ``POST /gis/query/statistics`` - Spatial statistics query +- ``CRUD /gis/geofences`` - Manage saved areas of interest +- ``GET /gis/export/geopackage`` - Export layers for offline use + +Design Principles +----------------- +- OGC API - Features Core + GeoJSON conformance +- Thin client architecture (QGIS displays, OpenSPP computes) +- Pre-aggregated data for performance +- PostGIS spatial queries +- Requires authentication via OAuth 2.0 + """, +} diff --git a/spp_api_v2_gis/data/qml_templates/graduated_polygon.qml b/spp_api_v2_gis/data/qml_templates/graduated_polygon.qml new file mode 100644 index 00000000..2b313d6a --- /dev/null +++ b/spp_api_v2_gis/data/qml_templates/graduated_polygon.qml @@ -0,0 +1,16 @@ + + + + + {{RANGES}} + + + {{SYMBOLS}} + + + + + 0 + 0 + {{OPACITY}} + diff --git a/spp_api_v2_gis/data/qml_templates/point_basic.qml b/spp_api_v2_gis/data/qml_templates/point_basic.qml new file mode 100644 index 00000000..4bcf4fa3 --- /dev/null +++ b/spp_api_v2_gis/data/qml_templates/point_basic.qml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 0 + 0 + {{OPACITY}} + diff --git a/spp_api_v2_gis/data/qml_templates/point_cluster.qml b/spp_api_v2_gis/data/qml_templates/point_cluster.qml new file mode 100644 index 00000000..38a5b938 --- /dev/null +++ b/spp_api_v2_gis/data/qml_templates/point_cluster.qml @@ -0,0 +1,55 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + 0 + 0 + {{OPACITY}} + diff --git a/spp_api_v2_gis/models/__init__.py b/spp_api_v2_gis/models/__init__.py new file mode 100644 index 00000000..d54fa7da --- /dev/null +++ b/spp_api_v2_gis/models/__init__.py @@ -0,0 +1,4 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +from . import api_client_scope +from . import fastapi_endpoint +from . import geofence diff --git a/spp_api_v2_gis/models/api_client_scope.py b/spp_api_v2_gis/models/api_client_scope.py new file mode 100644 index 00000000..a9c767de --- /dev/null +++ b/spp_api_v2_gis/models/api_client_scope.py @@ -0,0 +1,18 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Extends API client scope to support GIS resources.""" + +from odoo import fields, models + + +class ApiClientScope(models.Model): + """Extend API client scope to include GIS resources.""" + + _inherit = "spp.api.client.scope" + + resource = fields.Selection( + selection_add=[ + ("gis", "GIS"), + ("statistics", "Statistics"), + ], + ondelete={"gis": "cascade", "statistics": "cascade"}, + ) diff --git a/spp_api_v2_gis/models/fastapi_endpoint.py b/spp_api_v2_gis/models/fastapi_endpoint.py new file mode 100644 index 00000000..6261e991 --- /dev/null +++ b/spp_api_v2_gis/models/fastapi_endpoint.py @@ -0,0 +1,39 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
+"""Extend FastAPI endpoint to include GIS routers.""" + +import logging + +from odoo import models + +from fastapi import APIRouter + +_logger = logging.getLogger(__name__) + + +class SppApiV2GisEndpoint(models.Model): + """Extend FastAPI endpoint for GIS API.""" + + _inherit = "fastapi.endpoint" + + def _get_fastapi_routers(self) -> list[APIRouter]: + """Add GIS routers to API V2.""" + routers = super()._get_fastapi_routers() + if self.app == "api_v2": + from ..routers.export import export_router + from ..routers.geofence import geofence_router + from ..routers.ogc_features import ogc_features_router + from ..routers.proximity import proximity_router + from ..routers.spatial_query import spatial_query_router + from ..routers.statistics import statistics_router + + routers.extend( + [ + ogc_features_router, + export_router, + geofence_router, + proximity_router, + spatial_query_router, + statistics_router, + ] + ) + return routers diff --git a/spp_api_v2_gis/models/geofence.py b/spp_api_v2_gis/models/geofence.py new file mode 100644 index 00000000..681913e3 --- /dev/null +++ b/spp_api_v2_gis/models/geofence.py @@ -0,0 +1,281 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Geofence model for saved geographic areas of interest.""" + +import json +import logging + +from shapely.geometry import mapping + +from odoo import _, api, fields, models +from odoo.exceptions import ValidationError + +_logger = logging.getLogger(__name__) + + +class GisGeofence(models.Model): + """Saved Geographic Areas of Interest. 
+ + Geofences are user-defined polygons that can be: + - Created from QGIS plugin + - Used for spatial queries and reports + - Tagged for classification + - Linked to hazard incidents for disaster management + """ + + _name = "spp.gis.geofence" + _description = "Saved Geographic Areas of Interest" + _inherit = ["mail.thread", "mail.activity.mixin"] + _order = "name" + + name = fields.Char( + required=True, + tracking=True, + help="Name of this geofence", + ) + description = fields.Text( + tracking=True, + help="Description of this area of interest", + ) + + # Geometry field using GeoPolygonField from spp_gis + geometry = fields.GeoPolygonField( + string="Geographic Polygon", + required=True, + help="Geographic boundary as polygon or multipolygon", + ) + + # Classification + geofence_type = fields.Selection( + [ + ("hazard_zone", "Hazard Zone"), + ("service_area", "Service Area"), + ("targeting_area", "Targeting Area"), + ("custom", "Custom Area"), + ], + default="custom", + required=True, + tracking=True, + help="Type of geofence", + ) + + # Tags for flexible classification + tag_ids = fields.Many2many( + "spp.vocabulary", + "spp_gis_geofence_tag_rel", + "geofence_id", + "tag_id", + string="Tags", + help="Classification tags for this geofence", + ) + + # Optional relationship to hazard incident + incident_id = fields.Many2one( + "spp.hazard.incident", + string="Related Incident", + ondelete="set null", + tracking=True, + help="Hazard incident associated with this geofence (if applicable)", + ) + + # Status + active = fields.Boolean( + default=True, + tracking=True, + help="Uncheck to archive this geofence", + ) + + # Audit fields + created_by_id = fields.Many2one( + "res.users", + string="Created By", + default=lambda self: self.env.user, + readonly=True, + help="User who created this geofence", + ) + created_from = fields.Selection( + [ + ("qgis", "QGIS Plugin"), + ("api", "External API"), + ("ui", "OpenSPP UI"), + ], + default="ui", + required=True, + 
string="Created From", + help="Source of geofence creation", + ) + + # Computed area in square kilometers + area_sqkm = fields.Float( + string="Area (sq km)", + compute="_compute_area_sqkm", + store=True, + help="Area of the polygon in square kilometers (computed from geometry)", + ) + + @api.depends("geometry") + def _compute_area_sqkm(self): + """Compute area in square kilometers from geometry using PostGIS. + + Uses ST_Area with geography type for accurate area calculation + in square meters, then converts to square kilometers. + """ + for rec in self: + if not rec.geometry or not rec.id: + rec.area_sqkm = 0.0 + continue + + try: + # Use PostGIS ST_Area with geography cast for accurate measurement + # Geography type automatically uses spheroid calculations + query = """ + SELECT ST_Area(ST_Transform(geometry::geometry, 4326)::geography) / 1000000.0 as area_sqkm + FROM spp_gis_geofence + WHERE id = %s + """ + self.env.cr.execute(query, (rec.id,)) + result = self.env.cr.fetchone() + rec.area_sqkm = result[0] if result else 0.0 + except Exception as e: + _logger.warning("Failed to compute area for geofence %s: %s", rec.name, str(e)) + rec.area_sqkm = 0.0 + + @api.constrains("name", "active") + def _check_name_unique_active(self): + """Ensure name is unique among active geofences.""" + for rec in self: + if rec.active: + existing = self.search( + [ + ("name", "=", rec.name), + ("active", "=", True), + ("id", "!=", rec.id), + ], + limit=1, + ) + if existing: + raise ValidationError( + _("A geofence with the name '%s' already exists. 
Please use a unique name.") % rec.name + ) + + @api.constrains("geometry") + def _check_geometry_valid(self): + """Validate that geometry is not empty and is a valid polygon.""" + for rec in self: + if not rec.geometry: + raise ValidationError(_("Geometry cannot be empty.")) + + # Geometry validity is handled by the GeoPolygonField itself + # We just ensure it exists and is not empty + + def to_geojson(self): + """Return GeoJSON Feature representation of this geofence. + + Returns: + dict: GeoJSON Feature with geometry and properties + """ + self.ensure_one() + + if not self.geometry: + return { + "type": "Feature", + "geometry": None, + "properties": self._get_geojson_properties(), + } + + # Convert shapely geometry to GeoJSON + try: + geometry_dict = mapping(self.geometry) + except Exception as e: + _logger.warning("Failed to convert geometry to GeoJSON for geofence %s: %s", self.name, str(e)) + geometry_dict = None + + return { + "type": "Feature", + "geometry": geometry_dict, + "properties": self._get_geojson_properties(), + } + + def _get_geojson_properties(self): + """Get properties dictionary for GeoJSON representation. + + Returns: + dict: Properties including name, type, tags, etc. + """ + self.ensure_one() + + return { + "id": self.id, + "name": self.name, + "description": self.description or "", + "geofence_type": self.geofence_type, + "geofence_type_label": dict(self._fields["geofence_type"].selection).get(self.geofence_type, ""), + "area_sqkm": self.area_sqkm, + "tags": self.tag_ids.mapped("name"), + "incident_id": self.incident_id.id if self.incident_id else None, + "incident_name": self.incident_id.name if self.incident_id else None, + "created_from": self.created_from, + "created_by": self.created_by_id.name, + "create_date": self.create_date.isoformat() if self.create_date else None, + } + + def to_geojson_collection(self): + """Return GeoJSON FeatureCollection for multiple geofences. 
+ + Returns: + dict: GeoJSON FeatureCollection with all features + """ + features = [rec.to_geojson() for rec in self] + return { + "type": "FeatureCollection", + "features": features, + } + + @api.model + def create_from_geojson(self, geojson_str, name, geofence_type="custom", created_from="api", **kwargs): + """Create a geofence from GeoJSON string. + + Args: + geojson_str: GeoJSON string (Feature or FeatureCollection) + name: Name for the geofence + geofence_type: Type of geofence (default: custom) + created_from: Source of creation (default: api) + **kwargs: Additional field values + + Returns: + Created geofence record + + Raises: + ValidationError: If GeoJSON is invalid + """ + try: + geojson_data = json.loads(geojson_str) if isinstance(geojson_str, str) else geojson_str + except json.JSONDecodeError as e: + raise ValidationError(_("Invalid GeoJSON format: %s") % str(e)) from e + + # Handle FeatureCollection or Feature + if geojson_data.get("type") == "FeatureCollection": + if not geojson_data.get("features"): + raise ValidationError(_("FeatureCollection must contain at least one feature")) + # Use first feature's geometry + geometry = geojson_data["features"][0].get("geometry") + elif geojson_data.get("type") == "Feature": + geometry = geojson_data.get("geometry") + else: + # Assume it's a raw geometry + geometry = geojson_data + + if not geometry: + raise ValidationError(_("No geometry found in GeoJSON")) + + # Convert geometry dict to GeoJSON string for the GeoPolygonField + geometry_str = json.dumps(geometry) + + vals = { + "name": name, + "geometry": geometry_str, + "geofence_type": geofence_type, + "created_from": created_from, + } + vals.update(kwargs) + + return self.create(vals) diff --git a/spp_api_v2_gis/pyproject.toml b/spp_api_v2_gis/pyproject.toml new file mode 100644 index 00000000..4231d0cc --- /dev/null +++ b/spp_api_v2_gis/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git 
a/spp_api_v2_gis/readme/DESCRIPTION.md b/spp_api_v2_gis/readme/DESCRIPTION.md new file mode 100644 index 00000000..258533b5 --- /dev/null +++ b/spp_api_v2_gis/readme/DESCRIPTION.md @@ -0,0 +1,76 @@ +REST API for QGIS plugin integration, providing OGC API - Features endpoints, spatial queries, and geofence management. + +## Key Features + +- **OGC API - Features**: Standards-compliant feature collections (GovStack GIS BB) +- **GeoJSON Export**: Get pre-aggregated layer data for QGIS +- **QML Styling**: Fetch QGIS style files for consistent visualization +- **Spatial Queries**: Query registrant statistics within arbitrary polygons using PostGIS +- **Geofence Management**: Save and manage areas of interest + +## Architecture + +Follows thin client architecture where QGIS displays data and OpenSPP performs all computation: + +- All spatial queries executed in PostGIS for performance (including bbox via ST_Intersects) +- Pre-aggregated data returned to minimize data transfer +- Configuration-driven styling using QML templates +- JWT authentication with scope-based access control + +## API Endpoints + +### OGC API - Features (primary interface) + +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/gis/ogc/` | GET | OGC API landing page | +| `/gis/ogc/conformance` | GET | OGC conformance classes | +| `/gis/ogc/collections` | GET | List feature collections | +| `/gis/ogc/collections/{id}` | GET | Collection metadata | +| `/gis/ogc/collections/{id}/items` | GET | Feature items (GeoJSON) | +| `/gis/ogc/collections/{id}/items/{fid}` | GET | Single feature | +| `/gis/ogc/collections/{id}/qml` | GET | QGIS style file (extension) | + +### Additional endpoints + +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/gis/query/statistics` | POST | Query stats for polygon | +| `/gis/geofences` | POST/GET | Geofence management | +| `/gis/geofences/{id}` | GET/DELETE | Single geofence | +| `/gis/export/geopackage` | GET | Export for 
offline use | + +## Scopes and Data Privacy + +### OAuth Scopes + +| Scope | Access | Description | +|-------|--------|-------------| +| `gis:read` | Read-only | View collections, layers, statistics, export data | +| `gis:geofence` | Read + Write | Create and archive geofences (also requires `gis:read` for listing) | + +### What data is exposed + +**Aggregated statistics only.** No endpoint in this module returns individual registrant records. + +- **OGC collections/items**: Return GeoJSON features organized by administrative area, with pre-computed aggregate values (counts, percentages). Each feature represents an *area*, not a person. +- **Spatial query statistics** (`POST /gis/query/statistics`): Accepts a GeoJSON polygon and returns configured aggregate statistics computed by `spp.aggregation.service`. Individual registrant IDs are computed internally for aggregation but are **explicitly stripped** from the response before it is sent (see `spatial_query.py`). +- **Exports** (GeoPackage/GeoJSON): Contain the same area-level aggregated layer data, not registrant-level records. +- **Geofences**: Store only geometry and metadata — no registrant data. + +### Privacy controls + +- **K-anonymity suppression**: Statistics backed by CEL variables can apply k-anonymity thresholds. When a cell count falls below the configured minimum, the value is replaced with a suppression marker and flagged as `"suppressed": true` in the response. This prevents re-identification in small populations. +- **CEL variable configuration**: Administrators control which statistics are published and their suppression thresholds via `spp.statistic` records. +- **Scope separation**: `gis:read` and `gis:geofence` are separate scopes, allowing clients to be granted read-only access without write capability. 
+ +### Design rationale + +This module follows a **thin client** architecture: QGIS (or any OGC-compatible client) displays pre-aggregated data, while OpenSPP retains all individual-level data server-side. This ensures that GIS API clients — including the QGIS plugin — never need access to personally identifiable information. + +## Dependencies + +- `spp_api_v2` - FastAPI infrastructure +- `spp_gis` - PostGIS integration +- `spp_gis_report` - Report configuration +- `spp_area` - Administrative area data diff --git a/spp_api_v2_gis/routers/__init__.py b/spp_api_v2_gis/routers/__init__.py new file mode 100644 index 00000000..d5e170ae --- /dev/null +++ b/spp_api_v2_gis/routers/__init__.py @@ -0,0 +1,7 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +from . import export +from . import geofence +from . import ogc_features +from . import proximity +from . import spatial_query +from . import statistics diff --git a/spp_api_v2_gis/routers/export.py b/spp_api_v2_gis/routers/export.py new file mode 100644 index 00000000..6eb3081c --- /dev/null +++ b/spp_api_v2_gis/routers/export.py @@ -0,0 +1,131 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
+"""Export API endpoints for GeoPackage and offline data.""" + +import logging +from typing import Annotated + +from odoo.api import Environment + +from odoo.addons.fastapi.dependencies import odoo_env +from odoo.addons.spp_api_v2.middleware.auth import get_authenticated_client + +from fastapi import APIRouter, Depends, HTTPException, Query, Response, status + +from ..services.export_service import ExportService + +_logger = logging.getLogger(__name__) + +export_router = APIRouter(tags=["GIS"], prefix="/gis") + + +@export_router.get( + "/export/geopackage", + summary="Export layers as GeoPackage", + description="Export layers and geofences as GeoPackage (.gpkg) or ZIP of GeoJSON files for offline use in QGIS.", + response_class=Response, +) +async def export_geopackage( + env: Annotated[Environment, Depends(odoo_env)], + api_client: Annotated[dict, Depends(get_authenticated_client)], + layer_ids: Annotated[ + str | None, + Query(description="Comma-separated list of report codes to include (exports all if omitted)"), + ] = None, + include_geofences: Annotated[ + bool, + Query(description="Include user's geofences in export"), + ] = True, + admin_level: Annotated[ + int | None, + Query(description="Filter layers by admin level"), + ] = None, +): + """Export GIS data as GeoPackage for offline use. + + This endpoint exports selected layers and geofences in a format suitable for + offline use in QGIS or other GIS applications. 
+ + **Export Format:** + - Attempts to create a GeoPackage (.gpkg) file if fiona is available + - Falls back to ZIP of GeoJSON files (.geojson) if GeoPackage creation fails + - Both formats are compatible with QGIS 3.28+ + + **What's Included:** + - Each requested layer as a separate table/file + - User's active geofences (if include_geofences=true) + - Proper geometry columns and CRS (EPSG:4326) + + **Usage in QGIS:** + - GeoPackage: Drag and drop .gpkg file into QGIS + - ZIP: Extract and drag .geojson files into QGIS + - Load corresponding .qml style files for proper visualization + + Args: + env: Odoo environment + api_client: Authenticated API client + layer_ids: Comma-separated report codes (e.g., "pop_density,poverty_rate") + include_geofences: Whether to include user's saved geofences + admin_level: Filter layers to specific admin level (e.g., 2 for districts) + + Returns: + Response: Binary file download (GeoPackage or ZIP) + + Raises: + HTTPException: 403 if missing scope, 400 if no data available + """ + # Check read scope + if not api_client.has_scope("gis", "read"): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Client does not have gis:read scope", + ) + + try: + # Parse layer_ids + layer_ids_list = None + if layer_ids: + layer_ids_list = [code.strip() for code in layer_ids.split(",") if code.strip()] + + # Get export service + export_service = ExportService(env) + + # Generate export + file_bytes, filename, content_type = export_service.export_geopackage( + layer_ids=layer_ids_list, + include_geofences=include_geofences, + admin_level=admin_level, + ) + + # Log export operation + layer_count = len(layer_ids_list) if layer_ids_list else "all" + _logger.info( + "GIS export completed: %d bytes, format=%s, layers=%s, geofences=%s, admin_level=%s", + len(file_bytes), + content_type, + layer_count, + include_geofences, + admin_level, + ) + + # Return file download + return Response( + content=file_bytes, + media_type=content_type, 
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Geofence API endpoints for saving areas of interest."""

import json
import logging
from typing import Annotated

from odoo.api import Environment
from odoo.exceptions import ValidationError

from odoo.addons.fastapi.dependencies import odoo_env
from odoo.addons.spp_api_v2.middleware.auth import get_authenticated_client

from fastapi import APIRouter, Depends, HTTPException, Path, Query, Response, status

from ..schemas.geofence import (
    GeofenceCreateRequest,
    GeofenceListItem,
    GeofenceListResponse,
    GeofenceResponse,
)

_logger = logging.getLogger(__name__)

geofence_router = APIRouter(tags=["GIS"], prefix="/gis")


@geofence_router.post(
    "/geofences",
    response_model=GeofenceResponse,
    status_code=status.HTTP_201_CREATED,
    summary="Create geofence",
    description="Save area of interest as a geofence from GeoJSON.",
)
async def create_geofence(
    request: GeofenceCreateRequest,
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
    response: Response,
):
    """Create a new geofence from GeoJSON.

    Args:
        request: Geofence creation request with name, geometry, and optional fields
        env: Odoo environment
        api_client: Authenticated API client
        response: FastAPI response object for setting headers

    Returns:
        GeofenceResponse with created geofence details

    Raises:
        HTTPException: 403 if missing scope, 404 if incident_code unknown,
            422 if validation fails
    """
    # Check geofence scope
    if not api_client.has_scope("gis", "geofence"):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client does not have gis:geofence scope",
        )

    # Get the geofence model
    geofence_model = env["spp.gis.geofence"]

    # Prepare kwargs for optional fields
    kwargs = {}
    if request.description:
        kwargs["description"] = request.description

    # Handle incident_code if provided
    if request.incident_code:
        # Find incident by code (external ID)
        incident = env["spp.hazard.incident"].search([("code", "=", request.incident_code)], limit=1)
        if not incident:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Incident with code '{request.incident_code}' not found",
            )
        kwargs["incident_id"] = incident.id

    try:
        # Convert geometry dict to JSON string for create_from_geojson
        geometry_json = json.dumps(request.geometry)

        # Create geofence using model method
        geofence = geofence_model.create_from_geojson(
            geojson_str=geometry_json,
            name=request.name,
            geofence_type=request.geofence_type,
            created_from="api",
            **kwargs,
        )

        # Set Location header
        response.headers["Location"] = f"/api/v2/spp/gis/geofences/{geofence.id}"

        # Return response.
        # NOTE: Odoo Char fields return False (not None) when empty, which
        # would fail Pydantic validation for `str | None`; coerce to None.
        return GeofenceResponse(
            id=geofence.id,
            name=geofence.name,
            description=geofence.description or None,
            geofence_type=geofence.geofence_type,
            area_sqkm=geofence.area_sqkm,
            active=geofence.active,
            created_from=geofence.created_from,
        )

    except ValidationError as e:
        _logger.warning("Validation error creating geofence: %s", str(e))
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=str(e),
        ) from e
    except Exception as e:
        _logger.exception("Error creating geofence")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to create geofence: {str(e)}",
        ) from e


@geofence_router.get(
    "/geofences",
    response_model=GeofenceListResponse,
    summary="List geofences",
    description="List saved geofences with optional filtering and pagination.",
)
async def list_geofences(
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
    geofence_type: Annotated[str | None, Query()] = None,
    incident_id: Annotated[int | None, Query()] = None,
    active: Annotated[bool | None, Query()] = None,
    count: Annotated[int, Query(alias="_count", ge=1, le=100)] = 20,
    offset: Annotated[int, Query(alias="_offset", ge=0)] = 0,
):
    """List geofences with optional filters.

    Args:
        env: Odoo environment
        api_client: Authenticated API client
        geofence_type: Filter by geofence type (hazard_zone, service_area, targeting_area, custom)
        incident_id: Filter by related incident ID
        active: Filter by active status (default: True)
        count: Number of results per page (default: 20, max: 100)
        offset: Number of results to skip (default: 0)

    Returns:
        GeofenceListResponse with list of geofences and pagination info

    Raises:
        HTTPException: 403 if missing scope
    """
    # Check read scope
    if not api_client.has_scope("gis", "read"):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client does not have gis:read scope",
        )

    # Build search domain
    domain = []
    if geofence_type:
        domain.append(("geofence_type", "=", geofence_type))
    if incident_id is not None:
        domain.append(("incident_id", "=", incident_id))
    if active is not None:
        # An explicit active leaf also disables Odoo's implicit active filter,
        # allowing archived geofences to be listed when active=false is requested.
        domain.append(("active", "=", active))
    else:
        # Default to active geofences only
        domain.append(("active", "=", True))

    # Get geofence model
    geofence_model = env["spp.gis.geofence"]

    # Get total count
    total = geofence_model.search_count(domain)

    # Search with pagination
    geofences = geofence_model.search(domain, limit=count, offset=offset, order="name")

    # Convert to response schema
    items = [
        GeofenceListItem(
            id=geofence.id,
            name=geofence.name,
            geofence_type=geofence.geofence_type,
            area_sqkm=geofence.area_sqkm,
            active=geofence.active,
        )
        for geofence in geofences
    ]

    return GeofenceListResponse(
        geofences=items,
        total=total,
        offset=offset,
        count=len(items),
    )


@geofence_router.get(
    "/geofences/{geofence_id}",
    summary="Get geofence",
    description="Get a single geofence with full GeoJSON representation.",
)
async def get_geofence(
    geofence_id: Annotated[int, Path(description="Geofence ID")],
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
):
    """Get a single geofence by ID.

    Returns the full GeoJSON Feature representation including geometry.

    Args:
        geofence_id: Database ID of the geofence
        env: Odoo environment
        api_client: Authenticated API client

    Returns:
        dict: GeoJSON Feature with geometry and properties

    Raises:
        HTTPException: 403 if missing scope, 404 if not found
    """
    # Check read scope
    if not api_client.has_scope("gis", "read"):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client does not have gis:read scope",
        )

    # Get geofence model
    geofence_model = env["spp.gis.geofence"]

    # browse() does not apply the active filter, so archived geofences
    # remain retrievable by ID.
    geofence = geofence_model.browse(geofence_id)

    if not geofence.exists():
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Geofence with ID {geofence_id} not found",
        )

    # Return full GeoJSON representation
    try:
        return geofence.to_geojson()
    except Exception as e:
        _logger.exception("Error converting geofence to GeoJSON")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to convert geofence to GeoJSON: {str(e)}",
        ) from e


@geofence_router.delete(
    "/geofences/{geofence_id}",
    status_code=status.HTTP_204_NO_CONTENT,
    summary="Archive geofence",
    description="Soft delete a geofence by setting active=False.",
)
async def delete_geofence(
    geofence_id: Annotated[int, Path(description="Geofence ID")],
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
):
    """Archive a geofence (soft delete).

    Sets active=False rather than permanently deleting the record.

    Args:
        geofence_id: Database ID of the geofence
        env: Odoo environment
        api_client: Authenticated API client

    Raises:
        HTTPException: 403 if missing scope, 404 if not found
    """
    # Check geofence scope (same as create)
    if not api_client.has_scope("gis", "geofence"):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client does not have gis:geofence scope",
        )

    # Get geofence model
    geofence_model = env["spp.gis.geofence"]

    # Search for geofence
    geofence = geofence_model.browse(geofence_id)

    if not geofence.exists():
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Geofence with ID {geofence_id} not found",
        )

    # Soft delete by setting active=False
    try:
        geofence.write({"active": False})
    except Exception as e:
        _logger.exception("Error archiving geofence")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to archive geofence: {str(e)}",
        ) from e
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""OGC API - Features endpoints.

Implements the OGC API - Features Core standard (Part 1: Core) for
GovStack GIS Building Block compliance. Maps existing GIS report data
and data layers to OGC-compliant feature collections.

Endpoints:
    GET /gis/ogc/                              Landing page
    GET /gis/ogc/conformance                   Conformance classes
    GET /gis/ogc/collections                   List all collections
    GET /gis/ogc/collections/{id}              Single collection metadata
    GET /gis/ogc/collections/{id}/items        Feature items (GeoJSON)
    GET /gis/ogc/collections/{id}/items/{fid}  Single feature
    GET /gis/ogc/collections/{id}/qml          QGIS style file (extension)
"""

import json
import logging
import re
from typing import Annotated

from odoo.api import Environment
from odoo.exceptions import MissingError

from odoo.addons.fastapi.dependencies import odoo_env
from odoo.addons.spp_api_v2.middleware.auth import get_authenticated_client

from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request, Response, status

from ..schemas.ogc import (
    CollectionInfo,
    Collections,
    Conformance,
    LandingPage,
)
from ..services.ogc_service import OGCService
from ..services.qml_template_service import QMLTemplateService

_logger = logging.getLogger(__name__)

# All features are served in WGS84 lon/lat. Per OGC API - Features Part 2,
# the Content-Crs header carries the CRS URI enclosed in angle brackets.
_CRS84_URI = "http://www.opengis.net/def/crs/OGC/1.3/CRS84"
_CONTENT_CRS = f"<{_CRS84_URI}>"

ogc_features_router = APIRouter(tags=["GIS - OGC API Features"], prefix="/gis/ogc")


def _get_base_url(request: Request) -> str:
    """Extract base URL from request for self-referencing links.

    Args:
        request: FastAPI request object

    Returns:
        Base URL string (scheme + host + API prefix)
    """
    # Build from request URL, stripping the OGC path suffix
    url = str(request.base_url).rstrip("/")
    return f"{url}/api/v2/spp"


def _check_gis_read_scope(api_client):
    """Verify client has gis:read scope.

    Args:
        api_client: Authenticated API client

    Raises:
        HTTPException: If scope check fails
    """
    if not api_client.has_scope("gis", "read"):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client does not have gis:read scope",
        )


@ogc_features_router.get(
    "",
    response_model=LandingPage,
    summary="OGC API landing page",
    description="Provides links to the API definition, conformance, and collections.",
)
async def get_landing_page(
    request: Request,
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
):
    """OGC API - Features landing page."""
    _check_gis_read_scope(api_client)

    base_url = _get_base_url(request)
    service = OGCService(env, base_url)
    return service.get_landing_page()


@ogc_features_router.get(
    "/conformance",
    response_model=Conformance,
    summary="OGC conformance classes",
    description="Declares which OGC API conformance classes this server implements.",
)
async def get_conformance(
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
):
    """OGC API conformance declaration."""
    _check_gis_read_scope(api_client)

    service = OGCService(env)
    return service.get_conformance()


@ogc_features_router.get(
    "/collections",
    response_model=Collections,
    response_model_exclude_none=True,
    summary="List feature collections",
    description=("Lists all available feature collections. Each GIS report and data layer becomes a collection."),
)
async def get_collections(
    request: Request,
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
):
    """List all OGC feature collections."""
    _check_gis_read_scope(api_client)

    base_url = _get_base_url(request)
    service = OGCService(env, base_url)
    return service.get_collections()


@ogc_features_router.get(
    "/collections/{collection_id}",
    response_model=CollectionInfo,
    response_model_exclude_none=True,
    summary="Collection metadata",
    description="Returns metadata for a single feature collection.",
)
async def get_collection(
    collection_id: Annotated[str, Path(description="Collection identifier")],
    request: Request,
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
):
    """Get single collection metadata."""
    _check_gis_read_scope(api_client)

    try:
        base_url = _get_base_url(request)
        service = OGCService(env, base_url)
        return service.get_collection(collection_id)
    except MissingError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e


@ogc_features_router.get(
    "/collections/{collection_id}/items",
    summary="Feature items",
    description=(
        "Returns features from a collection as a GeoJSON FeatureCollection. "
        "Supports pagination via limit/offset and spatial filtering via bbox."
    ),
)
async def get_collection_items(
    collection_id: Annotated[str, Path(description="Collection identifier")],
    request: Request,
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
    limit: Annotated[int, Query(description="Maximum number of features", ge=1, le=10000)] = 1000,
    offset: Annotated[int, Query(description="Pagination offset", ge=0)] = 0,
    bbox: Annotated[
        str | None,
        Query(
            description=("Bounding box filter: west,south,east,north (e.g., -180,-90,180,90)"),
        ),
    ] = None,
):
    """Get features from a collection.

    Returns GeoJSON FeatureCollection with OGC pagination links.

    Raises:
        HTTPException: 400 for malformed bbox or query, 404 for unknown collection
    """
    _check_gis_read_scope(api_client)

    # Parse bbox parameter
    bbox_list = None
    if bbox:
        try:
            parts = [float(x.strip()) for x in bbox.split(",")]
            if len(parts) != 4:
                raise ValueError("bbox must have exactly 4 values")
            bbox_list = parts
        except ValueError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid bbox parameter: {e}",
            ) from e

    try:
        base_url = _get_base_url(request)
        service = OGCService(env, base_url)
        result = service.get_collection_items(
            collection_id,
            limit=limit,
            offset=offset,
            bbox=bbox_list,
        )
        return Response(
            content=_json_dumps(result),
            media_type="application/geo+json",
            # Advertise the CRS of the returned geometries (CRS84, lon/lat).
            headers={"Content-Crs": _CONTENT_CRS},
        )
    except MissingError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e


@ogc_features_router.options(
    "/collections/{collection_id}/items",
    summary="Advertise supported methods for items endpoint",
    include_in_schema=False,
)
async def options_collection_items(
    collection_id: Annotated[str, Path(description="Collection identifier")],
):
    """Handle OPTIONS requests from OAPIF clients (e.g. QGIS).

    QGIS sends OPTIONS to discover allowed methods before fetching features.
    """
    return Response(
        status_code=200,
        headers={
            "Allow": "GET, HEAD, OPTIONS",
            "Accept": "application/geo+json, application/json",
        },
    )


@ogc_features_router.get(
    "/collections/{collection_id}/items/{feature_id}",
    summary="Single feature",
    description="Returns a single feature from a collection.",
)
async def get_collection_item(
    collection_id: Annotated[str, Path(description="Collection identifier")],
    feature_id: Annotated[str, Path(description="Feature identifier")],
    request: Request,
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
):
    """Get a single feature by ID."""
    _check_gis_read_scope(api_client)

    try:
        base_url = _get_base_url(request)
        service = OGCService(env, base_url)
        result = service.get_collection_item(collection_id, feature_id)
        return Response(
            content=_json_dumps(result),
            media_type="application/geo+json",
            # Advertise the CRS of the returned geometry (CRS84, lon/lat).
            headers={"Content-Crs": _CONTENT_CRS},
        )
    except MissingError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e


@ogc_features_router.get(
    "/collections/{collection_id}/qml",
    summary="QGIS style file (OpenSPP extension)",
    description=(
        "Returns a QML style file for the collection. "
        "This is an OpenSPP extension to the OGC API standard, "
        "used by the QGIS plugin for automatic layer styling."
    ),
    response_class=Response,
)
async def get_collection_qml(
    collection_id: Annotated[str, Path(description="Collection identifier")],
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
    field_name: Annotated[str | None, Query(description="Field to symbolize")] = None,
    opacity: Annotated[float, Query(description="Layer opacity (0.0-1.0)", ge=0.0, le=1.0)] = 0.7,
):
    """Get QGIS style file (QML) for collection.

    Returns a QML XML file that can be loaded in QGIS to style the layer
    according to the report's color scheme and thresholds. Only available
    for report-based collections.
    """
    _check_gis_read_scope(api_client)

    try:
        # Resolve collection to a report for QML generation
        admin_level = None
        if collection_id.startswith("layer_"):
            # Report-driven data layers can provide QML via their linked report
            try:
                layer_database_id = int(collection_id[6:])
            except ValueError as e:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="QML styles only available for report-based collections",
                ) from e
            if layer_database_id <= 0:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="QML styles only available for report-based collections",
                )
            layer = env["spp.gis.data.layer"].sudo().browse(layer_database_id)
            if not layer.exists() or not (
                hasattr(layer, "source_type") and layer.source_type == "report" and layer.report_id
            ):
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="QML styles only available for report-based collections",
                )
            report = layer.report_id
        else:
            # Strip _admN suffix before report lookup
            report_code = collection_id
            match = re.match(r"^(.+)_adm(\d+)$", collection_id)
            if match:
                report_code = match.group(1)
                admin_level = int(match.group(2))
            report = env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1)
            if not report:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail=f"Report with code '{report_code}' not found",
                )

        # Generate QML (use report.env which is already sudo from lookup above)
        # Pass admin_level so thresholds can be adapted to the level's data range
        qml_service = QMLTemplateService(report.env)
        qml_xml = qml_service.generate_qml(
            report_id=report.id,
            geometry_type=report.geometry_type,
            field_name=field_name,
            opacity=opacity,
            admin_level=admin_level,
        )

        return Response(
            content=qml_xml,
            media_type="text/xml",
            headers={
                "Content-Disposition": f'attachment; filename="{collection_id}.qml"',
            },
        )

    except HTTPException:
        raise
    except ValueError as e:
        _logger.warning("Invalid QML request for %s: %s", collection_id, e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e
    except Exception as e:
        _logger.error("Error generating QML for %s: %s", collection_id, e, exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to generate QML style file",
        ) from e


def _json_dumps(data):
    """Serialize data to JSON string.

    Args:
        data: Data to serialize (non-JSON-native values are stringified)

    Returns:
        JSON string
    """
    return json.dumps(data, default=str)
@proximity_router.post(
    "/query/proximity",
    summary="Query statistics by proximity to reference points",
    description="Find registrants within or beyond a given radius from a set of "
    "reference points (e.g., health centers). Supports thousands of reference points "
    "using pre-buffered spatial indexes.",
    response_model=ProximityQueryResponse,
)
async def query_proximity(
    request: Annotated[ProximityQueryRequest, Body(...)],
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
) -> ProximityQueryResponse:
    """Query registrant statistics by proximity to reference points.

    Accepts a list of reference point coordinates and a radius. Returns
    aggregated statistics for registrants that are within or beyond the
    specified distance from any of the reference points.

    The server pre-buffers reference points and uses ST_Intersects against
    indexed registrant coordinates for efficient queries.

    Raises:
        HTTPException: 403 if missing scope, 400 for invalid parameters,
            500 on query failure
    """
    # Check read scope - accept either gis:read or statistics:read
    if not (api_client.has_scope("gis", "read") or api_client.has_scope("statistics", "read")):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client does not have gis:read or statistics:read scope",
        )

    try:
        service = SpatialQueryService(env)

        # Convert pydantic models to dicts for the service
        reference_points = [{"longitude": pt.longitude, "latitude": pt.latitude} for pt in request.reference_points]

        result = service.query_proximity(
            reference_points=reference_points,
            radius_km=request.radius_km,
            relation=request.relation,
            filters=request.filters,
            variables=request.variables,
        )

        # Remove internal registrant_ids from response
        result.pop("registrant_ids", None)

        return ProximityQueryResponse(**result)

    except ValueError as e:
        _logger.warning("Invalid proximity query parameters: %s", e)
        # Chain the cause (`from e`) for consistency with the spatial query
        # endpoints, so the original traceback is preserved in server logs.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e
    except Exception as e:
        _logger.exception("Proximity query failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Proximity query failed: {str(e)}",
        ) from e
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Spatial query API endpoints for querying statistics within arbitrary polygons."""

import logging
from typing import Annotated

from odoo.api import Environment

from odoo.addons.fastapi.dependencies import odoo_env
from odoo.addons.spp_api_v2.middleware.auth import get_authenticated_client

from fastapi import APIRouter, Body, Depends, HTTPException, status

from ..schemas.query import (
    BatchSpatialQueryRequest,
    BatchSpatialQueryResponse,
    SpatialQueryRequest,
    SpatialQueryResponse,
)
from ..services.spatial_query_service import SpatialQueryService

_logger = logging.getLogger(__name__)

spatial_query_router = APIRouter(tags=["GIS"], prefix="/gis")


@spatial_query_router.post(
    "/query/statistics",
    summary="Query statistics for polygon",
    description="Query registrant statistics within arbitrary polygon using PostGIS.",
    response_model=SpatialQueryResponse,
)
async def query_statistics(
    request: Annotated[SpatialQueryRequest, Body(...)],
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
) -> SpatialQueryResponse:
    """Query statistics within polygon.

    This endpoint accepts a GeoJSON polygon and returns aggregated statistics
    for registrants within that area. It uses PostGIS spatial queries for
    efficient computation.

    Query methods:
    - coordinates: Direct spatial query when registrants have coordinates (preferred)
    - area_fallback: Match via area_id when coordinates not available

    Statistics are computed by the unified aggregation service.

    Raises:
        HTTPException: 403 if missing scope, 400 for invalid parameters,
            500 on query failure
    """
    # Check read scope - accept either gis:read or statistics:read
    if not (api_client.has_scope("gis", "read") or api_client.has_scope("statistics", "read")):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client does not have gis:read or statistics:read scope",
        )

    try:
        # Initialize service
        service = SpatialQueryService(env)

        # Execute spatial query
        result = service.query_statistics(
            geometry=request.geometry,
            filters=request.filters,
            variables=request.variables,
        )

        # Remove internal registrant_ids from response
        result.pop("registrant_ids", None)

        return SpatialQueryResponse(**result)

    except ValueError as e:
        _logger.warning("Invalid query parameters: %s", e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e
    except Exception as e:
        _logger.exception("Spatial query failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Spatial query failed: {str(e)}",
        ) from e


@spatial_query_router.post(
    "/query/statistics/batch",
    summary="Batch query statistics for multiple polygons",
    description="Query registrant statistics for multiple polygons individually. "
    "Returns per-geometry results plus an aggregate summary.",
    response_model=BatchSpatialQueryResponse,
)
async def query_statistics_batch(
    request: Annotated[BatchSpatialQueryRequest, Body(...)],
    env: Annotated[Environment, Depends(odoo_env)],
    api_client: Annotated[dict, Depends(get_authenticated_client)],
) -> BatchSpatialQueryResponse:
    """Batch query statistics for multiple geometries.

    Each geometry is queried independently, returning per-shape statistics
    that can be used for thematic map visualization. A summary field provides
    the deduplicated aggregate across all shapes.

    Raises:
        HTTPException: 403 if missing scope, 400 for invalid parameters,
            500 on query failure
    """
    # Check read scope - accept either gis:read or statistics:read
    if not (api_client.has_scope("gis", "read") or api_client.has_scope("statistics", "read")):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Client does not have gis:read or statistics:read scope",
        )

    try:
        service = SpatialQueryService(env)

        # Convert pydantic models to dicts for the service
        geometries = [{"id": item.id, "geometry": item.geometry} for item in request.geometries]

        result = service.query_statistics_batch(
            geometries=geometries,
            filters=request.filters,
            variables=request.variables,
        )

        return BatchSpatialQueryResponse(**result)

    except ValueError as e:
        _logger.warning("Invalid batch query parameters: %s", e)
        # Chain the cause (`from e`) for consistency with query_statistics,
        # so the original traceback is preserved in server logs.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e
    except Exception as e:
        _logger.exception("Batch spatial query failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Batch spatial query failed: {str(e)}",
        ) from e
+"""Statistics discovery API endpoint.""" + +import logging +from typing import Annotated + +from odoo.api import Environment + +from odoo.addons.fastapi.dependencies import odoo_env +from odoo.addons.spp_api_v2.middleware.auth import get_authenticated_client + +from fastapi import APIRouter, Depends, HTTPException, status + +from ..schemas.statistics import ( + StatisticCategoryInfo, + StatisticInfo, + StatisticsListResponse, +) + +_logger = logging.getLogger(__name__) + +statistics_router = APIRouter(tags=["GIS"], prefix="/gis") + + +@statistics_router.get( + "/statistics", + summary="List published GIS statistics", + description="Returns all statistics published for GIS context, grouped by category.", + response_model=StatisticsListResponse, +) +async def list_statistics( + env: Annotated[Environment, Depends(odoo_env)], + api_client: Annotated[dict, Depends(get_authenticated_client)], +) -> StatisticsListResponse: + """List all GIS-published statistics grouped by category. + + Used by the QGIS plugin to discover what statistics are available + for spatial queries and map visualization. 
+ """ + # Check read scope + if not api_client.has_scope("gis", "read"): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Client does not have gis:read scope", + ) + + try: + Statistic = env["spp.statistic"].sudo() + stats_by_category = Statistic.get_published_by_category("gis") + + categories = [] + total_count = 0 + + for category_code, stat_records in stats_by_category.items(): + # Get category metadata + category_record = stat_records[0].category_id if stat_records else None + + stat_items = [] + for stat in stat_records: + config = stat.get_context_config("gis") + stat_items.append( + StatisticInfo( + name=stat.name, + label=config.get("label", stat.label), + description=stat.description, + format=config.get("format", stat.format), + unit=stat.unit, + ) + ) + + categories.append( + StatisticCategoryInfo( + code=category_code, + name=category_record.name if category_record else category_code.replace("_", " ").title(), + icon=category_record.icon if category_record else None, + statistics=stat_items, + ) + ) + total_count += len(stat_items) + + return StatisticsListResponse( + categories=categories, + total_count=total_count, + ) + + except Exception as e: + _logger.exception("Failed to list statistics") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Failed to list statistics: {str(e)}", + ) from None diff --git a/spp_api_v2_gis/schemas/__init__.py b/spp_api_v2_gis/schemas/__init__.py new file mode 100644 index 00000000..7a73017a --- /dev/null +++ b/spp_api_v2_gis/schemas/__init__.py @@ -0,0 +1,5 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +from . import geofence +from . import geojson +from . import ogc +from . import query diff --git a/spp_api_v2_gis/schemas/geofence.py b/spp_api_v2_gis/schemas/geofence.py new file mode 100644 index 00000000..d53f6410 --- /dev/null +++ b/spp_api_v2_gis/schemas/geofence.py @@ -0,0 +1,45 @@ +# Part of OpenSPP. 
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Pydantic schemas for Geofence API."""

from pydantic import BaseModel, Field


class GeofenceCreateRequest(BaseModel):
    """Request to create a geofence.

    The geometry is carried as a raw GeoJSON dict (Polygon or
    MultiPolygon) rather than a typed model.
    """

    name: str = Field(..., description="Name of the geofence")
    description: str | None = Field(default=None, description="Description of the geofence")
    geometry: dict = Field(..., description="Geometry as GeoJSON (Polygon or MultiPolygon)")
    # Defaults to "custom" when the caller does not classify the geofence.
    geofence_type: str = Field(default="custom", description="Type of geofence")
    incident_code: str | None = Field(default=None, description="Related incident code")


class GeofenceResponse(BaseModel):
    """Response from geofence operations (create/read)."""

    id: int = Field(..., description="Geofence database identifier")
    name: str = Field(..., description="Geofence name")
    description: str | None = Field(default=None, description="Geofence description")
    geofence_type: str = Field(..., description="Type of geofence")
    area_sqkm: float = Field(..., description="Area in square kilometers")
    active: bool = Field(..., description="Whether the geofence is active")
    created_from: str = Field(..., description="Source of creation")


class GeofenceListItem(BaseModel):
    """Geofence item in list response (summary fields only)."""

    id: int = Field(..., description="Geofence database identifier")
    name: str = Field(..., description="Geofence name")
    geofence_type: str = Field(..., description="Type of geofence")
    area_sqkm: float = Field(..., description="Area in square kilometers")
    active: bool = Field(..., description="Whether the geofence is active")


class GeofenceListResponse(BaseModel):
    """Response from geofence list endpoint, with pagination metadata."""

    geofences: list[GeofenceListItem] = Field(..., description="List of geofences")
    total: int = Field(..., description="Total count of geofences")
    offset: int = Field(..., description="Offset used for pagination")
    count: int = Field(..., description="Number of items returned")
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Pydantic schemas for GeoJSON responses."""

from pydantic import BaseModel, Field


class GeoJSONGeometry(BaseModel):
    """GeoJSON geometry."""

    type: str = Field(..., description="Geometry type")
    coordinates: list = Field(..., description="Coordinates array")


class GeoJSONFeatureProperties(BaseModel):
    """Properties for a GeoJSON feature."""

    # Flexible properties - subclass for specific feature types
    pass


class GeoJSONFeature(BaseModel):
    """GeoJSON Feature.

    NOTE: properties and geometry are plain dicts rather than typed
    models, keeping the schema permissive for arbitrary feature payloads.
    """

    type: str = Field(default="Feature", description="GeoJSON type")
    properties: dict = Field(..., description="Feature properties")
    geometry: dict | None = Field(default=None, description="GeoJSON geometry")


class GeoJSONFeatureCollection(BaseModel):
    """GeoJSON FeatureCollection.

    Extends plain GeoJSON with optional ``metadata`` and ``styling``
    members used by the QGIS client.
    """

    type: str = Field(default="FeatureCollection", description="GeoJSON type")
    features: list[GeoJSONFeature] = Field(..., description="List of features")
    metadata: dict | None = Field(default=None, description="Collection metadata")
    styling: dict | None = Field(default=None, description="Styling hints for QGIS")
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Pydantic schemas for OGC API - Features responses.

Implements the OGC API - Features Core standard (Part 1: Core)
for GovStack GIS Building Block compliance.
"""

from pydantic import BaseModel, ConfigDict, Field


class OGCLink(BaseModel):
    """OGC API link object."""

    href: str = Field(..., description="URL of the link target")
    rel: str = Field(..., description="Relation type (e.g., self, items, conformance)")
    type: str | None = Field(default=None, description="Media type of the target")
    title: str | None = Field(default=None, description="Human-readable title")


class LandingPage(BaseModel):
    """OGC API - Features landing page."""

    title: str = Field(..., description="API title")
    description: str = Field(..., description="API description")
    links: list[OGCLink] = Field(..., description="Navigation links")


class Conformance(BaseModel):
    """OGC API conformance declaration."""

    # camelCase name is mandated by the OGC API - Features JSON schema,
    # hence the N815 suppression.
    conformsTo: list[str] = Field(  # noqa: N815
        ..., description="List of conformance class URIs"
    )


class SpatialExtent(BaseModel):
    """Spatial extent with bounding box."""

    bbox: list[list[float]] = Field(..., description="Bounding box coordinates [[west, south, east, north]]")
    crs: str = Field(
        default="http://www.opengis.net/def/crs/OGC/1.3/CRS84",
        description="Coordinate reference system",
    )


class TemporalExtent(BaseModel):
    """Temporal extent with time interval."""

    # Interval endpoints may be None for open-ended ranges.
    interval: list[list[str | None]] = Field(..., description="Time interval [[start, end]]")


class Extent(BaseModel):
    """Collection extent (spatial and temporal)."""

    spatial: SpatialExtent | None = Field(default=None, description="Spatial extent")
    temporal: TemporalExtent | None = Field(default=None, description="Temporal extent")


class CollectionInfo(BaseModel):
    """OGC API collection metadata."""

    model_config = ConfigDict(populate_by_name=True)

    id: str = Field(..., description="Collection identifier")
    title: str = Field(..., description="Human-readable title")
    description: str | None = Field(default=None, description="Collection description")
    extent: Extent | None = Field(default=None, description="Spatial/temporal extent")
    # camelCase name mandated by the OGC API - Features JSON schema.
    itemType: str = Field(  # noqa: N815
        default="feature",
        description="Type of items in collection",
    )
    crs: list[str] = Field(
        default=["http://www.opengis.net/def/crs/OGC/1.3/CRS84"],
        description="Supported CRS list",
    )
    links: list[OGCLink] = Field(default_factory=list, description="Navigation links")


class Collections(BaseModel):
    """OGC API collections list."""

    links: list[OGCLink] = Field(default_factory=list, description="Navigation links")
    collections: list[CollectionInfo] = Field(..., description="Available collections")
# === Batch Query Schemas ===


class GeometryItem(BaseModel):
    """A single geometry with an identifier for batch queries."""

    # NOTE(review): duplicate ids across a batch are not validated here;
    # presumably callers guarantee uniqueness — confirm in the router.
    id: str = Field(..., description="Unique identifier for this geometry (e.g., feature ID)")
    geometry: dict = Field(..., description="GeoJSON geometry (Polygon or MultiPolygon)")


class BatchSpatialQueryRequest(BaseModel):
    """Request for batch spatial query across multiple geometries."""

    geometries: list[GeometryItem] = Field(
        ...,
        min_length=1,
        description="List of geometries to query, each with a unique ID",
    )
    filters: dict | None = Field(default=None, description="Additional filters for registrants")
    variables: list[str] | None = Field(
        default=None,
        description="List of statistic names to compute (defaults to GIS-published statistics)",
    )


class BatchResultItem(BaseModel):
    """Result for a single geometry in a batch query."""

    id: str = Field(..., description="Geometry identifier matching the request")
    total_count: int = Field(..., description="Total number of registrants in this geometry")
    query_method: str = Field(
        ...,
        description="Method used for query (coordinates, area_fallback)",
    )
    areas_matched: int = Field(..., description="Number of areas intersecting this geometry")
    statistics: dict = Field(..., description="Statistics computed for this geometry")
    access_level: str | None = Field(
        default=None,
        description="Access level applied to statistics (aggregate or individual)",
    )
    from_cache: bool = Field(
        default=False,
        description="Whether statistics were served from cache",
    )
    computed_at: str | None = Field(
        default=None,
        description="ISO 8601 timestamp when statistics were computed",
    )
class BatchSummary(BaseModel):
    """Aggregated summary across all geometries in a batch query."""

    total_count: int = Field(..., description="Combined total registrants across all geometries")
    geometries_queried: int = Field(..., description="Number of geometries in the batch")
    statistics: dict = Field(..., description="Combined statistics across all geometries")
    access_level: str | None = Field(
        default=None,
        description="Access level applied to statistics (aggregate or individual)",
    )
    from_cache: bool = Field(
        default=False,
        description="Whether statistics were served from cache",
    )
    computed_at: str | None = Field(
        default=None,
        description="ISO 8601 timestamp when statistics were computed",
    )


class BatchSpatialQueryResponse(BaseModel):
    """Response from batch spatial query."""

    results: list[BatchResultItem] = Field(..., description="Per-geometry results")
    summary: BatchSummary = Field(..., description="Aggregated summary across all geometries")


# === Proximity Query Schemas ===


class ReferencePoint(BaseModel):
    """A geographic reference point for proximity queries."""

    # Bounds enforce valid WGS84 decimal degrees.
    longitude: float = Field(..., ge=-180, le=180, description="Longitude in decimal degrees")
    latitude: float = Field(..., ge=-90, le=90, description="Latitude in decimal degrees")


class ProximityQueryRequest(BaseModel):
    """Request for proximity-based spatial query.

    Finds registrants within or beyond a given radius from a set of
    reference points (e.g., health centers, schools). The server
    pre-buffers the reference points and uses ST_Intersects against
    indexed registrant coordinates for efficient queries even with
    thousands of reference points.
    """

    # Hard cap of 10000 points bounds the cost of server-side buffering.
    reference_points: list[ReferencePoint] = Field(
        ...,
        min_length=1,
        max_length=10000,
        description="Reference locations (e.g., health centers) as lon/lat points",
    )
    # Radius is capped at 500 km.
    radius_km: float = Field(
        ...,
        gt=0,
        le=500,
        description="Search radius in kilometers",
    )
    relation: Literal["within", "beyond"] = Field(
        default="within",
        description="'within' returns registrants inside the radius; "
        "'beyond' returns those outside the radius of all reference points",
    )
    filters: dict | None = Field(default=None, description="Additional filters for registrants")
    variables: list[str] | None = Field(
        default=None,
        description="List of statistic names to compute (defaults to GIS-published statistics)",
    )


class ProximityQueryResponse(BaseModel):
    """Response from proximity-based spatial query."""

    total_count: int = Field(..., description="Number of registrants matching the proximity criteria")
    query_method: str = Field(
        ...,
        description="Method used for query (coordinates, area_fallback)",
    )
    areas_matched: int = Field(
        ...,
        description="Number of areas matched (0 for coordinate-based queries)",
    )
    reference_points_count: int = Field(
        ...,
        description="Number of reference points used in the query",
    )
    radius_km: float = Field(..., description="Radius used for the query (km)")
    relation: str = Field(..., description="Relation used: 'within' or 'beyond'")
    statistics: dict = Field(..., description="Computed aggregate statistics")
    access_level: str | None = Field(
        default=None,
        description="Access level applied to statistics (aggregate or individual)",
    )
    from_cache: bool = Field(
        default=False,
        description="Whether statistics were served from cache",
    )
    computed_at: str | None = Field(
        default=None,
        description="ISO 8601 timestamp when statistics were computed",
    )
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Pydantic schemas for statistics discovery API."""

from pydantic import BaseModel, Field


class StatisticInfo(BaseModel):
    """Information about a single published statistic."""

    name: str = Field(..., description="Technical name (e.g., 'children_under_5')")
    label: str = Field(..., description="Display label (e.g., 'Children Under 5')")
    description: str | None = Field(default=None, description="Detailed description")
    format: str = Field(..., description="Aggregation format (count, sum, avg, percent, ratio, currency)")
    unit: str | None = Field(default=None, description="Unit of measurement")


class StatisticCategoryInfo(BaseModel):
    """Information about a category of statistics."""

    code: str = Field(..., description="Category code (e.g., 'demographics')")
    name: str = Field(..., description="Display name (e.g., 'Demographics')")
    icon: str | None = Field(default=None, description="Font Awesome icon class")
    statistics: list[StatisticInfo] = Field(..., description="Statistics in this category")


class StatisticsListResponse(BaseModel):
    """Response listing all published statistics for a context."""

    categories: list[StatisticCategoryInfo] = Field(..., description="Statistics organized by category")
    total_count: int = Field(..., description="Total number of statistics across all categories")
class CatalogService:
    """Service for building GIS catalog."""

    def __init__(self, env):
        """Initialize catalog service."""
        self.env = env

    def get_catalog(self):
        """Build catalog from available reports and data layers.

        Returns:
            dict: Catalog with reports, data_layers, and area_level_names
        """
        return {
            "reports": self._get_reports(),
            "data_layers": self._get_data_layers(),
            "area_level_names": self._get_area_level_names(),
        }

    def _get_reports(self):
        """Query spp.gis.report for available reports.

        Returns:
            list[dict]: List of report info dictionaries
        """
        report_model = self.env["spp.gis.report"].sudo()
        data_model = self.env["spp.gis.report.data"].sudo()

        entries = []
        for rec in report_model.search([("active", "=", True)], order="sequence, name"):
            # Distinct admin levels that actually have data rows for this report.
            grouped = data_model._read_group(
                [("report_id", "=", rec.id)],
                groupby=["area_level"],
                aggregates=[],
            )
            levels = sorted(lvl for (lvl,) in grouped if lvl is not None)

            entries.append(
                {
                    "id": rec.code,
                    "name": rec.name,
                    "description": rec.description or None,
                    "category": rec.category_id.name if rec.category_id else None,
                    "geometry_type": self._normalize_geometry_type(rec.geometry_type),
                    "area_level": rec.base_area_level,
                    "last_refresh": rec.last_refresh.isoformat() if rec.last_refresh else None,
                    "freshness": self._get_freshness_indicator(rec),
                    "admin_levels_available": levels,
                }
            )

        _logger.info("Found %d GIS reports for catalog", len(entries))
        return entries

    def _get_data_layers(self):
        """Query spp.gis.data.layer for available layers.

        Returns:
            list[dict]: List of data layer info dictionaries
        """
        layer_model = self.env["spp.gis.data.layer"].sudo()

        entries = []
        for rec in layer_model.search([], order="sequence, name"):
            # source_type / report_id may not exist on every deployment's
            # layer model, hence the hasattr guards.
            entries.append(
                {
                    "id": str(rec.id),
                    "name": rec.name,
                    "geometry_type": self._map_geo_repr_to_geometry_type(rec.geo_repr),
                    "source_model": rec.model_name or None,
                    "source_type": rec.source_type if hasattr(rec, "source_type") else None,
                    "report_code": rec.report_id.code if hasattr(rec, "report_id") and rec.report_id else None,
                }
            )

        _logger.info("Found %d GIS data layers for catalog", len(entries))
        return entries

    def _normalize_geometry_type(self, geometry_type):
        """Normalize geometry type to standard GeoJSON types.

        Args:
            geometry_type: Report geometry type (polygon, point, cluster, heatmap)

        Returns:
            str: Normalized geometry type (polygon, point, line)
        """
        # Cluster and heatmap renderings are both point-based under the hood;
        # anything unrecognized defaults to polygon.
        normalized = {
            "polygon": "polygon",
            "point": "point",
            "cluster": "point",
            "heatmap": "point",
        }
        return normalized.get(geometry_type, "polygon")

    def _map_geo_repr_to_geometry_type(self, geo_repr):
        """Map data layer geo_repr to standard geometry type.

        Args:
            geo_repr: Layer representation mode (basic, choropleth)

        Returns:
            str: Geometry type (polygon, point, line)
        """
        # geo_repr describes the rendering mode, not the geometry type, so
        # all layers are reported as polygon (the common case for areas).
        return "polygon"

    def _get_area_level_names(self):
        """Build mapping from area_level to area type name.

        Queries distinct area levels and their associated type names
        from the area table.

        Returns:
            dict: {area_level: type_name} mapping
        """
        self.env.cr.execute(
            """
            SELECT DISTINCT a.area_level, at.name
            FROM spp_area a
            LEFT JOIN spp_area_type at ON at.id = a.area_type_id
            WHERE a.area_level IS NOT NULL AND at.name IS NOT NULL
            ORDER BY a.area_level
            """
        )
        names = {}
        for level, type_name in self.env.cr.fetchall():
            # First name encountered per level wins (rows are ordered by level).
            names.setdefault(level, type_name)
        return names

    def _get_freshness_indicator(self, report):
        """Calculate freshness indicator for a report.

        Args:
            report: spp.gis.report record

        Returns:
            str: Freshness indicator (fresh, stale, never_refreshed)
        """
        if not report.last_refresh:
            return "never_refreshed"
        return "stale" if report.is_stale else "fresh"
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Service for exporting GIS data as GeoPackage or ZIP."""

import io
import json
import logging
import tempfile
import zipfile
from pathlib import Path

_logger = logging.getLogger(__name__)


class ExportService:
    """Service for exporting GIS layers to various formats.

    Prefers GeoPackage output (via the optional ``fiona`` dependency)
    and falls back to a ZIP of GeoJSON files when fiona is unavailable
    or the GeoPackage write fails.
    """

    def __init__(self, env):
        """Initialize export service.

        Args:
            env: Odoo environment used to access GIS models.
        """
        self.env = env

    def export_geopackage(
        self,
        layer_ids=None,
        include_geofences=True,
        admin_level=None,
    ):
        """Export layers and geofences as GeoPackage or ZIP of GeoJSON files.

        Args:
            layer_ids: List of report codes to export (optional, all if None)
            include_geofences: Include user's geofences (default: True)
            admin_level: Filter layers by admin level (optional)

        Returns:
            tuple: (bytes, filename, content_type)
                - bytes: File content
                - filename: Suggested filename
                - content_type: MIME type

        Raises:
            ValueError: If no layers found or invalid parameters
        """
        # Collect layers to export
        layers_data = self._collect_layers(layer_ids, admin_level)

        # Collect geofences if requested
        geofences_data = []
        if include_geofences:
            geofences_data = self._collect_geofences()

        if not layers_data and not geofences_data:
            raise ValueError("No data available to export")

        # Try to create GeoPackage with fiona
        try:
            return self._create_geopackage(layers_data, geofences_data)
        except ImportError:
            _logger.info("fiona not available, falling back to GeoJSON ZIP export")
            return self._create_geojson_zip(layers_data, geofences_data)
        except Exception as e:
            _logger.warning("GeoPackage creation failed: %s, falling back to GeoJSON ZIP", e)
            return self._create_geojson_zip(layers_data, geofences_data)

    def _collect_layers(self, layer_ids, admin_level):
        """Collect layer data for export.

        Args:
            layer_ids: List of report codes or None for all
            admin_level: Admin level filter

        Returns:
            list: List of (name, geojson) tuples
        """
        layers_data = []

        # Local import avoids a circular dependency at module load time.
        from .layers_service import LayersService

        layers_service = LayersService(self.env)

        if layer_ids:
            # Export only the explicitly requested report codes.
            for layer_code in layer_ids:
                try:
                    geojson = layers_service.get_layer_geojson(
                        layer_id=layer_code,
                        layer_type="report",
                        admin_level=admin_level,
                        include_geometry=True,
                        include_disaggregation=False,
                    )
                    # Sanitize name for filename
                    layer_name = self._sanitize_filename(layer_code)
                    layers_data.append((layer_name, geojson))
                    _logger.info("Collected layer: %s with %d features", layer_code, len(geojson.get("features", [])))
                except Exception as e:
                    # Best-effort: a single failing layer must not abort the export.
                    _logger.warning("Failed to collect layer %s: %s", layer_code, e)
        else:
            # Export all available reports
            reports = self.env["spp.gis.report"].sudo().search([("active", "=", True)])
            for report in reports:
                try:
                    geojson = layers_service.get_layer_geojson(
                        layer_id=report.code,
                        layer_type="report",
                        admin_level=admin_level,
                        include_geometry=True,
                        include_disaggregation=False,
                    )
                    layer_name = self._sanitize_filename(report.code or f"layer_{report.id}")
                    layers_data.append((layer_name, geojson))
                    _logger.info("Collected report: %s with %d features", report.code, len(geojson.get("features", [])))
                except Exception as e:
                    _logger.warning("Failed to collect report %s: %s", report.code, e)

        return layers_data

    def _collect_geofences(self):
        """Collect active geofences for export.

        Returns:
            list: Single-element list ``[("geofences", feature_collection)]``
                where feature_collection is a GeoJSON FeatureCollection dict,
                or an empty list when there are no exportable geofences.
        """
        # No sudo() here: record rules scope the export to the user's geofences.
        geofence_model = self.env["spp.gis.geofence"]
        geofences = geofence_model.search([("active", "=", True)])

        if not geofences:
            return []

        features = []
        for geofence in geofences:
            try:
                feature = geofence.to_geojson()
                features.append(feature)
            except Exception as e:
                _logger.warning("Failed to export geofence %s: %s", geofence.id, e)

        if not features:
            return []

        geojson = {
            "type": "FeatureCollection",
            "features": features,
            "metadata": {
                "layer": "geofences",
                "count": len(features),
            },
        }

        _logger.info("Collected %d geofences", len(features))
        return [("geofences", geojson)]

    @staticmethod
    def _dedupe_layer_names(all_data):
        """Make layer names unique within one export.

        Duplicate sanitized names would silently overwrite GeoPackage
        layers and create duplicate ZIP entries, so repeats get a
        ``_2``, ``_3``, ... suffix in encounter order.

        Args:
            all_data: List of (name, geojson) tuples

        Returns:
            list: Same tuples with unique first elements
        """
        seen = {}
        result = []
        for name, geojson in all_data:
            count = seen.get(name, 0) + 1
            seen[name] = count
            unique = name if count == 1 else f"{name}_{count}"
            result.append((unique, geojson))
        return result

    def _create_geopackage(self, layers_data, geofences_data):
        """Create GeoPackage file using fiona.

        Args:
            layers_data: List of (name, geojson) tuples
            geofences_data: List of (name, geojson) tuples

        Returns:
            tuple: (bytes, filename, content_type)
        """
        import fiona

        try:
            # fiona < 1.9 idiom; from_epsg is deprecated in newer fiona.
            from fiona.crs import from_epsg

            crs = from_epsg(4326)  # WGS84
        except ImportError:
            # fiona >= 1.9 accepts an authority string directly.
            crs = "EPSG:4326"

        # Create temporary file for GeoPackage
        with tempfile.NamedTemporaryFile(suffix=".gpkg", delete=False) as tmp_file:
            gpkg_path = tmp_file.name

        try:
            # Combine all data; dedupe so no GeoPackage layer is overwritten.
            all_data = self._dedupe_layer_names(layers_data + geofences_data)

            # Write each layer to GeoPackage
            for layer_name, geojson in all_data:
                features = geojson.get("features", [])
                if not features:
                    _logger.info("Skipping empty layer: %s", layer_name)
                    continue

                # Determine geometry type from first feature
                geometry_type = self._get_geometry_type(features[0])

                # Schema is derived from the first feature only; features with
                # differing property sets may fail to write and are skipped below.
                schema = self._build_schema(features[0], geometry_type)

                # Write layer to GeoPackage
                with fiona.open(
                    gpkg_path,
                    mode="w",
                    driver="GPKG",
                    layer=layer_name,
                    crs=crs,
                    schema=schema,
                ) as layer:
                    for feature in features:
                        # Ensure feature has valid structure
                        if feature.get("geometry") and feature.get("properties"):
                            layer.write(feature)

                _logger.info("Wrote layer %s with %d features to GeoPackage", layer_name, len(features))

            # Read GeoPackage file
            with open(gpkg_path, "rb") as f:
                gpkg_bytes = f.read()

            return (
                gpkg_bytes,
                "openspp_export.gpkg",
                "application/geopackage+sqlite3",
            )

        finally:
            # Clean up temporary file
            try:
                Path(gpkg_path).unlink()
            except Exception as e:
                _logger.warning("Failed to delete temporary GeoPackage: %s", e)

    def _create_geojson_zip(self, layers_data, geofences_data):
        """Create ZIP of GeoJSON files as fallback.

        Args:
            layers_data: List of (name, geojson) tuples
            geofences_data: List of (name, geojson) tuples

        Returns:
            tuple: (bytes, filename, content_type)
        """
        # Create ZIP file in memory
        zip_buffer = io.BytesIO()

        with zipfile.ZipFile(zip_buffer, mode="w", compression=zipfile.ZIP_DEFLATED) as zip_file:
            # Combine all data; dedupe to avoid duplicate ZIP entry names.
            all_data = self._dedupe_layer_names(layers_data + geofences_data)

            for layer_name, geojson in all_data:
                # Write GeoJSON to ZIP
                geojson_str = json.dumps(geojson, indent=2)
                zip_file.writestr(f"{layer_name}.geojson", geojson_str)
                _logger.info("Added %s.geojson to ZIP", layer_name)

        zip_bytes = zip_buffer.getvalue()

        return (
            zip_bytes,
            "openspp_export.zip",
            "application/zip",
        )

    def _get_geometry_type(self, feature):
        """Extract geometry type from feature.

        Args:
            feature: GeoJSON feature

        Returns:
            str: Geometry type (Point, LineString, Polygon, etc.)
        """
        geometry = feature.get("geometry")
        if not geometry:
            return "Point"  # Default

        geom_type = geometry.get("type", "Point")

        # Map GeoJSON types to fiona types; unknown types fall back to Polygon.
        type_mapping = {
            "Point": "Point",
            "MultiPoint": "MultiPoint",
            "LineString": "LineString",
            "MultiLineString": "MultiLineString",
            "Polygon": "Polygon",
            "MultiPolygon": "MultiPolygon",
        }

        return type_mapping.get(geom_type, "Polygon")

    def _build_schema(self, feature, geometry_type):
        """Build fiona schema from feature properties.

        Args:
            feature: GeoJSON feature
            geometry_type: Geometry type string

        Returns:
            dict: Fiona schema definition
        """
        properties = feature.get("properties", {})

        # Build property schema. bool must be tested before int because
        # bool is a subclass of int in Python.
        prop_schema = {}
        for key, value in properties.items():
            if isinstance(value, bool):
                prop_schema[key] = "bool"
            elif isinstance(value, int):
                prop_schema[key] = "int"
            elif isinstance(value, float):
                prop_schema[key] = "float"
            else:
                prop_schema[key] = "str"

        return {
            "geometry": geometry_type,
            "properties": prop_schema,
        }

    def _sanitize_filename(self, name):
        """Sanitize string for use as filename.

        Args:
            name: Original name

        Returns:
            str: Sanitized name ("layer" when nothing survives sanitization)
        """
        # Replace problematic characters
        sanitized = name.replace(" ", "_").replace("/", "_").replace("\\", "_")
        # Remove any non-alphanumeric except underscore and dash
        sanitized = "".join(c for c in sanitized if c.isalnum() or c in ("_", "-"))
        return sanitized or "layer"
+# Key: (report_code, admin_level) +# Value: {"data": , "timestamp": } +_report_geojson_cache = {} +_report_cache_lock = threading.Lock() +_REPORT_CACHE_TTL = 60 # seconds + + +class LayersService: + """Service for retrieving GIS layer data.""" + + def __init__(self, env): + """Initialize layers service.""" + self.env = env + + def get_layer_geojson( + self, + layer_id, + layer_type="report", + admin_level=None, + area_codes=None, + parent_area_code=None, + include_geometry=True, + include_disaggregation=False, + limit=None, + offset=0, + bbox=None, + ): + """Get layer data as GeoJSON FeatureCollection. + + Args: + layer_id: Layer identifier (report code or layer database ID) + layer_type: Type of layer - "report" or "layer" + admin_level: Filter to specific admin level (optional) + area_codes: List of area codes to filter (optional) + parent_area_code: Parent area code to filter children (optional) + include_geometry: Include polygon geometry (default: True) + include_disaggregation: Include disaggregation data (default: False) + limit: Maximum number of features to return (optional) + offset: Number of features to skip (default: 0) + bbox: Bounding box filter [west, south, east, north] (optional) + + Returns: + dict: GeoJSON FeatureCollection with styling hints + + Raises: + MissingError: If layer not found + ValueError: If layer_type is invalid + """ + if layer_type == "report": + return self._get_report_geojson( + layer_id, + admin_level=admin_level, + area_codes=area_codes, + parent_area_code=parent_area_code, + include_geometry=include_geometry, + include_disaggregation=include_disaggregation, + bbox=bbox, + ) + elif layer_type == "layer": + return self._get_data_layer_geojson( + layer_id, + include_geometry=include_geometry, + limit=limit, + offset=offset, + bbox=bbox, + ) + else: + raise ValueError(f"Invalid layer_type: {layer_type}. 
Must be 'report' or 'layer'") + + def _get_report_geojson( + self, + report_code, + admin_level=None, + area_codes=None, + parent_area_code=None, + include_geometry=True, + include_disaggregation=False, + bbox=None, + ): + """Get GIS report data as GeoJSON. + + For the common OAPIF path (no area_codes, no parent_area_code, default + geometry and disaggregation settings), the full report GeoJSON is cached + in memory with a short TTL. Bbox filtering is then applied in Python on + the cached data, avoiding repeated expensive report generation when QGIS + sends parallel tiled requests. + + Args: + report_code: Report code (external identifier) + admin_level: Filter to specific admin level + area_codes: List of area codes to filter + parent_area_code: Parent area code to filter children + include_geometry: Include polygon geometry + include_disaggregation: Include disaggregation data + bbox: Bounding box filter [west, south, east, north] (optional) + + Returns: + dict: GeoJSON FeatureCollection with styling hints + """ + # Fast path: common OAPIF requests (no area/parent filters, default settings) + # can use cached report GeoJSON with Python-level bbox filtering. + is_cacheable = not area_codes and not parent_area_code and include_geometry and not include_disaggregation + + if is_cacheable: + return self._get_report_geojson_cached(report_code, admin_level, bbox) + + # Slow path: custom filters bypass cache + return self._get_report_geojson_uncached( + report_code, + admin_level, + area_codes, + parent_area_code, + include_geometry, + include_disaggregation, + bbox, + ) + + def _get_report_geojson_cached(self, report_code, admin_level, bbox): + """Get report GeoJSON using cache, with Python bbox filtering. 
+ + Args: + report_code: Report code + admin_level: Admin level filter + bbox: Bounding box [west, south, east, north] or None + + Returns: + dict: GeoJSON FeatureCollection (deep-copied from cache) + """ + cache_key = (report_code, admin_level) + + # Check cache + with _report_cache_lock: + entry = _report_geojson_cache.get(cache_key) + if entry and (time.time() - entry["timestamp"]) < _REPORT_CACHE_TTL: + geojson = copy.deepcopy(entry["data"]) + _logger.debug("Cache hit for report %s admin_level=%s", report_code, admin_level) + if bbox: + geojson["features"] = filter_features_by_bbox(geojson.get("features", []), bbox) + return geojson + + # Cache miss — generate full GeoJSON (no bbox filter at DB level) + geojson = self._get_report_geojson_uncached( + report_code, + admin_level, + area_codes=None, + parent_area_code=None, + include_geometry=True, + include_disaggregation=False, + bbox=None, + ) + + # Store in cache + with _report_cache_lock: + _report_geojson_cache[cache_key] = { + "data": copy.deepcopy(geojson), + "timestamp": time.time(), + } + + # Apply bbox filtering in Python on the result + if bbox: + geojson["features"] = filter_features_by_bbox(geojson.get("features", []), bbox) + + return geojson + + def _get_report_geojson_uncached( + self, + report_code, + admin_level=None, + area_codes=None, + parent_area_code=None, + include_geometry=True, + include_disaggregation=False, + bbox=None, + ): + """Generate report GeoJSON without caching. 
+ + Args: + report_code: Report code (external identifier) + admin_level: Filter to specific admin level + area_codes: List of area codes to filter + parent_area_code: Parent area code to filter children + include_geometry: Include polygon geometry + include_disaggregation: Include disaggregation data + bbox: Bounding box filter [west, south, east, north] (optional) + + Returns: + dict: GeoJSON FeatureCollection with styling hints + """ + Report = self.env["spp.gis.report"].sudo() + report = Report.search([("code", "=", report_code)], limit=1) + + if not report: + raise MissingError(f"Report not found: {report_code}") + + # Resolve area codes to IDs + area_ids = self._resolve_area_codes(area_codes) if area_codes else None + + # Resolve parent area code + parent_area_id = None + if parent_area_code: + parent_area = self.env["spp.area"].sudo().search([("code", "=", parent_area_code)], limit=1) + parent_area_id = parent_area.id if parent_area else None + + # Apply bbox spatial filter via PostGIS ST_Intersects + if bbox: + bbox_geojson = self._bbox_to_geojson(bbox) + matching_areas = self.env["spp.area"].sudo().search([("geo_polygon", "gis_intersects", bbox_geojson)]) + if area_ids: + area_ids = list(set(area_ids) & set(matching_areas.ids)) + else: + area_ids = matching_areas.ids + + # Get GeoJSON from report + geojson = report._to_geojson( + admin_level=admin_level, + area_ids=area_ids, + parent_area_id=parent_area_id, + include_geometry=include_geometry, + include_disaggregation=include_disaggregation, + ) + + # Add styling hints to metadata + styling = self._build_report_styling(report) + if "metadata" not in geojson: + geojson["metadata"] = {} + geojson["metadata"]["styling"] = styling + + # Also add styling at root level for convenience + geojson["styling"] = styling + + _logger.info("Generated GeoJSON for report: %s with %d features", report_code, len(geojson.get("features", []))) + + return geojson + + def _get_data_layer_geojson(self, layer_id, 
include_geometry=True, limit=None, offset=0, bbox=None): + """Get data layer features as GeoJSON. + + Args: + layer_id: Layer database ID + include_geometry: Include polygon geometry + limit: Maximum features to return (optional) + offset: Number of features to skip (default: 0) + bbox: Bounding box filter [west, south, east, north] (optional) + + Returns: + dict: GeoJSON FeatureCollection with styling hints + """ + try: + layer_id_int = int(layer_id) + except (ValueError, TypeError) as e: + raise ValueError(f"Invalid layer_id: {layer_id}") from e + + Layer = self.env["spp.gis.data.layer"].sudo() + layer = Layer.browse(layer_id_int) + + if not layer.exists(): + raise MissingError(f"Layer not found: {layer_id}") + + # If this is a report-driven layer, delegate to report handler + if hasattr(layer, "source_type") and layer.source_type == "report": + if layer.report_id: + return self._get_report_geojson( + layer.report_id.code, + include_geometry=include_geometry, + bbox=bbox, + ) + raise MissingError(f"Layer {layer_id} is report-driven but has no report configured") + + # For model-driven layers, fetch features from the source model + features = self._fetch_layer_features(layer, include_geometry, limit=limit, offset=offset, bbox=bbox) + + # Build styling hints + styling = self._build_layer_styling(layer) + + geojson = { + "type": "FeatureCollection", + "features": features, + "metadata": { + "layer": { + "id": layer.id, + "name": layer.name, + "model": layer.model_name, + }, + "styling": styling, + }, + "styling": styling, + } + + _logger.info("Generated GeoJSON for layer: %s with %d features", layer.name, len(features)) + + return geojson + + def _fetch_layer_features(self, layer, include_geometry, limit=None, offset=0, bbox=None): + """Fetch features from a model-driven layer. 
+ + Args: + layer: spp.gis.data.layer record + include_geometry: Include geometry in features + limit: Maximum records to return (default: 5000 safety limit) + offset: Number of records to skip (default: 0) + bbox: Bounding box filter [west, south, east, north] (optional) + + Returns: + list: List of GeoJSON features + """ + if not layer.model_name or not layer.geo_field_id: + _logger.warning("Layer %s has no model or geo field configured", layer.name) + return [] + + Model = self.env[layer.model_name].sudo() + + # Build domain + domain = [] + if layer.domain: + try: + from ast import literal_eval + + domain = literal_eval(layer.domain) + except (ValueError, SyntaxError) as e: + _logger.warning("Invalid domain on layer %s: %s", layer.name, e) + + # Apply bbox spatial filter via PostGIS ST_Intersects + if bbox: + geo_field_name = layer.geo_field_id.name + bbox_geojson = self._bbox_to_geojson(bbox) + domain.append((geo_field_name, "gis_intersects", bbox_geojson)) + + # Apply safety limit if none specified + search_limit = min(limit, 5000) if limit else 5000 + + # Search for records with pagination pushed to database + records = Model.search(domain, limit=search_limit, offset=offset) + + features = [] + geo_field_name = layer.geo_field_id.name + + for record in records: + # Build properties + properties = { + "id": record.id, + "name": record.display_name, + } + + # Add choropleth value if configured + if hasattr(layer, "choropleth_field_id") and layer.choropleth_field_id: + field_name = layer.choropleth_field_id.name + if hasattr(record, field_name): + properties["value"] = getattr(record, field_name) + + # Build feature + feature = { + "type": "Feature", + "id": record.id, + "properties": properties, + "geometry": None, + } + + # Add geometry if requested + if include_geometry and hasattr(record, geo_field_name): + geo_value = getattr(record, geo_field_name) + if geo_value: + try: + # Try parsing as JSON first (GeoJSON format) + geometry = json.loads(geo_value) + 
feature["geometry"] = geometry + except (json.JSONDecodeError, TypeError): + # Try parsing as WKT + try: + from shapely import wkt + + shape = wkt.loads(geo_value) + feature["geometry"] = shape.__geo_interface__ + except ImportError: + _logger.warning("shapely not available for WKT parsing") + except Exception as e: + _logger.warning("Failed to parse geometry: %s", e) + + features.append(feature) + + return features + + def get_feature_count(self, layer_id, layer_type="report", admin_level=None): + """Get total feature count without loading data. + + Args: + layer_id: Layer identifier (report code or layer database ID) + layer_type: Type of layer - "report" or "layer" + admin_level: Filter to specific admin level (optional, reports only) + + Returns: + int: Total number of features + + Raises: + MissingError: If layer not found + """ + if layer_type == "report": + report = self.env["spp.gis.report"].sudo().search([("code", "=", layer_id)], limit=1) + if not report: + raise MissingError(f"Report not found: {layer_id}") + domain = [("report_id", "=", report.id)] + if admin_level is not None: + domain.append(("area_level", "=", admin_level)) + return self.env["spp.gis.report.data"].sudo().search_count(domain) + elif layer_type == "layer": + try: + layer_id_int = int(layer_id) + except (ValueError, TypeError) as e: + raise ValueError(f"Invalid layer_id: {layer_id}") from e + + layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) + if not layer.exists(): + raise MissingError(f"Layer not found: {layer_id}") + + Model = self.env[layer.model_name].sudo() + domain = [] + if layer.domain: + try: + from ast import literal_eval + + domain = literal_eval(layer.domain) + except (ValueError, SyntaxError): + pass + return Model.search_count(domain) + else: + raise ValueError(f"Invalid layer_type: {layer_type}") + + def get_feature_by_id(self, layer_id, feature_id, layer_type="report"): + """Get a single feature by ID without loading the full collection. 
+ + Args: + layer_id: Layer identifier (report code or layer database ID) + feature_id: Feature identifier (area_code for reports, record ID for layers) + layer_type: Type of layer - "report" or "layer" + + Returns: + dict: GeoJSON Feature + + Raises: + MissingError: If layer or feature not found + """ + if layer_type == "report": + return self._get_report_feature_by_id(layer_id, feature_id) + elif layer_type == "layer": + return self._get_layer_feature_by_id(layer_id, feature_id) + else: + raise ValueError(f"Invalid layer_type: {layer_type}") + + def _get_report_feature_by_id(self, report_code, feature_id): + """Get a single report feature by area_code. + + Args: + report_code: Report code + feature_id: Area code (used as feature ID in report GeoJSON) + + Returns: + dict: GeoJSON Feature + + Raises: + MissingError: If report or feature not found + """ + report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) + if not report: + raise MissingError(f"Report not found: {report_code}") + + data = ( + self.env["spp.gis.report.data"] + .sudo() + .search( + [("report_id", "=", report.id), ("area_code", "=", str(feature_id))], + limit=1, + ) + ) + if not data: + raise MissingError(f"Feature {feature_id} not found in report {report_code}") + + # Build properties + has_data = data.raw_value is not None + properties = { + "area_id": data.area_id.id, + "area_code": data.area_code, + "area_name": data.area_name, + "area_level": data.area_level, + "has_data": has_data, + "raw_value": data.raw_value, + "normalized_value": data.normalized_value, + "display_value": data.display_value if has_data else "No Data", + "record_count": data.record_count, + } + + # Build geometry + geometry = None + if data.area_id.geo_polygon: + try: + geo = data.area_id.geo_polygon + # GeoPolygonField may return a Shapely geometry object + # or a WKT/WKB string depending on the spp_gis version. 
+ if hasattr(geo, "__geo_interface__"): + geometry = geo.__geo_interface__ + else: + from shapely import wkt + + shape = wkt.loads(geo) + geometry = shape.__geo_interface__ + except (ImportError, Exception) as e: + _logger.warning("Failed to parse geometry for area %s: %s", data.area_code, e) + + return { + "type": "Feature", + "id": data.area_code, + "properties": properties, + "geometry": geometry, + } + + def _get_layer_feature_by_id(self, layer_id, feature_id): + """Get a single data layer feature by record ID. + + Args: + layer_id: Layer database ID + feature_id: Record ID in the source model + + Returns: + dict: GeoJSON Feature + + Raises: + MissingError: If layer or feature not found + """ + try: + layer_id_int = int(layer_id) + except (ValueError, TypeError) as e: + raise ValueError(f"Invalid layer_id: {layer_id}") from e + + layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) + if not layer.exists(): + raise MissingError(f"Layer not found: {layer_id}") + + try: + feature_id_int = int(feature_id) + except (ValueError, TypeError) as e: + raise MissingError(f"Feature {feature_id} not found in layer {layer_id}") from e + + Model = self.env[layer.model_name].sudo() + record = Model.browse(feature_id_int) + if not record.exists(): + raise MissingError(f"Feature {feature_id} not found in layer {layer_id}") + + geo_field_name = layer.geo_field_id.name + properties = { + "id": record.id, + "name": record.display_name, + } + + geometry = None + if hasattr(record, geo_field_name): + geo_value = getattr(record, geo_field_name) + if geo_value: + try: + geometry = json.loads(geo_value) + except (json.JSONDecodeError, TypeError): + try: + from shapely import wkt + + shape = wkt.loads(geo_value) + geometry = shape.__geo_interface__ + except (ImportError, Exception) as e: + _logger.warning("Failed to parse geometry: %s", e) + + return { + "type": "Feature", + "id": record.id, + "properties": properties, + "geometry": geometry, + } + + def 
_build_report_styling(self, report): + """Build styling hints from report configuration. + + Args: + report: spp.gis.report record + + Returns: + dict: Styling configuration + """ + styling = { + "geometry_type": report.geometry_type, + "color_scheme": None, + "thresholds": [], + "threshold_mode": report.threshold_mode, + } + + # Add color scheme + if report.color_scheme_id: + styling["color_scheme"] = { + "code": report.color_scheme_id.code, + "name": report.color_scheme_id.name, + "type": report.color_scheme_id.scheme_type, + "colors": report.color_scheme_id.get_colors_list() + if hasattr(report.color_scheme_id, "get_colors_list") + else [], + } + + # Add thresholds + for threshold in report.threshold_ids.sorted("sequence"): + styling["thresholds"].append( + { + "min_value": threshold.min_value, + "max_value": threshold.max_value, + "color": threshold.color, + "label": threshold.label, + } + ) + + return styling + + def _build_layer_styling(self, layer): + """Build styling hints from data layer configuration. + + Args: + layer: spp.gis.data.layer record + + Returns: + dict: Styling configuration + """ + styling = { + "geometry_type": "polygon", # Default + "representation": layer.geo_repr, + } + + # Add choropleth configuration if available + if hasattr(layer, "_get_choropleth_config"): + choropleth_config = layer._get_choropleth_config() + if choropleth_config: + styling["choropleth"] = choropleth_config + + # Add report-driven layer styling if available + if hasattr(layer, "get_layer_style"): + try: + layer_style = layer.get_layer_style() + styling.update(layer_style) + except Exception as e: + _logger.warning("Failed to get layer style: %s", e) + + return styling + + def _bbox_to_geojson(self, bbox): + """Convert bounding box to GeoJSON Polygon for gis_intersects operator. 
+ + Args: + bbox: [west, south, east, north] + + Returns: + dict: GeoJSON Polygon geometry + """ + west, south, east, north = bbox + return { + "type": "Polygon", + "coordinates": [ + [ + [west, south], + [east, south], + [east, north], + [west, north], + [west, south], + ] + ], + } + + def _resolve_area_codes(self, area_codes): + """Convert area codes to IDs. + + Args: + area_codes: List of area codes + + Returns: + list: Area IDs or None + """ + if not area_codes: + return None + + areas = self.env["spp.area"].sudo().search([("code", "in", area_codes)]) + return areas.ids if areas else None + + +def filter_features_by_bbox(features, bbox): + """Filter GeoJSON features by bounding box overlap. + + Uses bounding-box-of-geometry vs query-bbox intersection test. + This is an approximation (same as PostGIS && operator) that is + fast and sufficient for OAPIF tiled fetching. + + Args: + features: List of GeoJSON Feature dicts + bbox: [west, south, east, north] + + Returns: + list: Features whose geometry bbox overlaps the query bbox + """ + west, south, east, north = bbox + result = [] + + for feature in features: + geometry = feature.get("geometry") + if not geometry: + continue + + coords = _extract_all_coordinates(geometry) + if not coords: + continue + + # Compute feature's bounding box from coordinates + feature_west = min(c[0] for c in coords) + feature_east = max(c[0] for c in coords) + feature_south = min(c[1] for c in coords) + feature_north = max(c[1] for c in coords) + + # Bounding box overlap test + if feature_east >= west and feature_west <= east and feature_north >= south and feature_south <= north: + result.append(feature) + + return result + + +def _extract_all_coordinates(geometry): + """Extract all coordinate pairs from a GeoJSON geometry. + + Handles Polygon, MultiPolygon, Point, MultiPoint, LineString, + and MultiLineString geometry types. 
+ + Args: + geometry: GeoJSON geometry dict with "type" and "coordinates" + + Returns: + list: Flat list of [lon, lat] coordinate pairs, or empty list + """ + geom_type = geometry.get("type", "") + coordinates = geometry.get("coordinates") + if not coordinates: + return [] + + coords = [] + if geom_type == "Point": + coords.append(coordinates) + elif geom_type in ("MultiPoint", "LineString"): + coords.extend(coordinates) + elif geom_type in ("Polygon", "MultiLineString"): + for ring in coordinates: + coords.extend(ring) + elif geom_type == "MultiPolygon": + for polygon in coordinates: + for ring in polygon: + coords.extend(ring) + return coords diff --git a/spp_api_v2_gis/services/ogc_service.py b/spp_api_v2_gis/services/ogc_service.py new file mode 100644 index 00000000..7279fcff --- /dev/null +++ b/spp_api_v2_gis/services/ogc_service.py @@ -0,0 +1,515 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""OGC API - Features service adapter. + +Translates OGC API parameters to existing CatalogService and LayersService +calls, producing OGC-compliant responses for GovStack GIS BB compliance. +""" + +import logging +import re + +from odoo.exceptions import MissingError + +from .catalog_service import CatalogService +from .layers_service import LayersService + +_logger = logging.getLogger(__name__) + +# OGC API - Features conformance classes +CONFORMANCE_CLASSES = [ + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core", + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30", + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson", +] + + +class OGCService: + """Adapter service for OGC API - Features. + + Wraps CatalogService and LayersService to produce OGC-compliant + responses from existing GIS data sources. + """ + + def __init__(self, env, base_url=""): + """Initialize OGC service. 
+ + Args: + env: Odoo environment for database access + base_url: Base URL for generating self-referencing links + """ + self.env = env + self.base_url = base_url.rstrip("/") + self.catalog_service = CatalogService(env) + self.layers_service = LayersService(env) + + def get_landing_page(self): + """Build OGC API landing page. + + Returns: + dict: Landing page with navigation links + """ + ogc_base = f"{self.base_url}/gis/ogc" + return { + "title": "OpenSPP GIS API", + "description": ( + "OGC API - Features endpoints for OpenSPP geospatial data. " + "Provides access to GIS reports and data layers as OGC-compliant " + "feature collections." + ), + "links": [ + { + "href": ogc_base, + "rel": "self", + "type": "application/json", + "title": "This document", + }, + { + "href": f"{ogc_base}/conformance", + "rel": "conformance", + "type": "application/json", + "title": "OGC API conformance classes", + }, + { + "href": f"{ogc_base}/collections", + "rel": "data", + "type": "application/json", + "title": "Feature collections", + }, + { + "href": f"{self.base_url}/openapi.json", + "rel": "service-desc", + "type": "application/vnd.oai.openapi+json;version=3.0", + "title": "OpenAPI definition", + }, + ], + } + + def get_conformance(self): + """Build OGC API conformance declaration. + + Returns: + dict: Conformance class URIs + """ + return {"conformsTo": CONFORMANCE_CLASSES} + + def get_collections(self): + """Build OGC collections list from catalog. + + Each report fans out into one collection per available admin level. + This prevents larger polygons from overlapping smaller ones. 
+ + Returns: + dict: Collections response with links + """ + catalog = self.catalog_service.get_catalog() + ogc_base = f"{self.base_url}/gis/ogc" + area_level_names = catalog.get("area_level_names", {}) + + collections = [] + + # Map reports to collections — one per available admin level + for report in catalog.get("reports", []): + levels = report.get("admin_levels_available", []) + if not levels: + # Fallback: single collection at base_area_level + levels = [report["area_level"]] + for level in levels: + collection = self._report_to_collection(report, admin_level=level, area_level_names=area_level_names) + collections.append(collection) + + # Map data layers to collections + for layer in catalog.get("data_layers", []): + collection = self._data_layer_to_collection(layer) + collections.append(collection) + + return { + "links": [ + { + "href": f"{ogc_base}/collections", + "rel": "self", + "type": "application/json", + "title": "This document", + }, + ], + "collections": collections, + } + + def get_collection(self, collection_id): + """Get single collection metadata. 
+ + Supports three ID formats: + - "layer_{id}" — data layer + - "{code}_adm{N}" — report at specific admin level + - "{code}" (bare) — backward compat, defaults to base_area_level + + Args: + collection_id: Collection identifier + + Returns: + dict: Collection metadata + + Raises: + MissingError: If collection not found + """ + layer_type, layer_id, admin_level = self._parse_collection_id(collection_id) + + # Data layer lookup + if layer_type == "layer": + catalog = self.catalog_service.get_catalog() + for layer in catalog.get("data_layers", []): + if layer["id"] == layer_id: + return self._data_layer_to_collection(layer) + raise MissingError(f"Collection not found: {collection_id}") + + # Report lookup + catalog = self.catalog_service.get_catalog() + area_level_names = catalog.get("area_level_names", {}) + + for report in catalog.get("reports", []): + if report["id"] == layer_id: + # If bare code (no _admN suffix), default to base_area_level + if admin_level is None: + admin_level = report["area_level"] + return self._report_to_collection(report, admin_level=admin_level, area_level_names=area_level_names) + + raise MissingError(f"Collection not found: {collection_id}") + + def get_collection_items( + self, + collection_id, + limit=1000, + offset=0, + bbox=None, + ): + """Get features from a collection. + + For data layers, pagination is pushed to the database via ORM + search(limit, offset). For reports, the dataset is bounded by + geographic area count (typically hundreds) so Python-level + pagination is acceptable. 
+ + Args: + collection_id: Collection identifier + limit: Maximum features to return (default 1000) + offset: Pagination offset + bbox: Bounding box filter [west, south, east, north] + + Returns: + dict: GeoJSON FeatureCollection with OGC pagination links + + Raises: + MissingError: If collection not found + """ + layer_type, layer_id, admin_level = self._parse_collection_id(collection_id) + + # For bare report codes, default to base_area_level + if layer_type == "report" and admin_level is None: + admin_level = self._get_report_base_level(layer_id) + + # Get total count without loading all features + total_count = self.layers_service.get_feature_count( + layer_id=layer_id, + layer_type=layer_type, + admin_level=admin_level, + ) + + # Fetch features with pagination and spatial filter pushed to PostGIS + geojson = self.layers_service.get_layer_geojson( + layer_id=layer_id, + layer_type=layer_type, + admin_level=admin_level, + limit=limit, + offset=offset, + bbox=bbox, + ) + + features = geojson.get("features", []) + + # Apply Python-level pagination for report layers. + # Reports return all features from _to_geojson() since they are + # bounded by area count. Data layers handle pagination at the DB level. 
+ if layer_type == "report" and (offset > 0 or len(features) > limit): + features = features[offset : offset + limit] + + # Build OGC response + ogc_base = f"{self.base_url}/gis/ogc" + items_url = f"{ogc_base}/collections/{collection_id}/items" + + links = [ + { + "href": f"{items_url}?limit={limit}&offset={offset}", + "rel": "self", + "type": "application/geo+json", + "title": "This page", + }, + { + "href": f"{ogc_base}/collections/{collection_id}", + "rel": "collection", + "type": "application/json", + "title": "Collection metadata", + }, + ] + + # Add next link if more features exist + if offset + limit < total_count: + next_offset = offset + limit + links.append( + { + "href": f"{items_url}?limit={limit}&offset={next_offset}", + "rel": "next", + "type": "application/geo+json", + "title": "Next page", + } + ) + + # Add previous link if not on first page + if offset > 0: + previous_offset = max(0, offset - limit) + links.append( + { + "href": f"{items_url}?limit={limit}&offset={previous_offset}", + "rel": "prev", + "type": "application/geo+json", + "title": "Previous page", + } + ) + + return { + "type": "FeatureCollection", + "features": features, + "links": links, + "numberMatched": total_count, + "numberReturned": len(features), + } + + def get_collection_item(self, collection_id, feature_id): + """Get single feature from a collection. + + Queries the specific record directly by ID without loading the + full collection. 
+ + Args: + collection_id: Collection identifier + feature_id: Feature identifier + + Returns: + dict: GeoJSON Feature + + Raises: + MissingError: If collection or feature not found + """ + layer_type, layer_id, _admin_level = self._parse_collection_id(collection_id) + + feature = self.layers_service.get_feature_by_id( + layer_id=layer_id, + feature_id=feature_id, + layer_type=layer_type, + ) + + # Add OGC links + ogc_base = f"{self.base_url}/gis/ogc" + feature.setdefault("links", []) + feature["links"].append( + { + "href": f"{ogc_base}/collections/{collection_id}/items/{feature_id}", + "rel": "self", + "type": "application/geo+json", + } + ) + feature["links"].append( + { + "href": f"{ogc_base}/collections/{collection_id}", + "rel": "collection", + "type": "application/json", + } + ) + return feature + + def _report_to_collection(self, report, admin_level=None, area_level_names=None): + """Convert a catalog report to an OGC collection. + + When admin_level is provided, the collection ID becomes + "{code}_adm{level}" and the title includes the level name. 
+ + Args: + report: Report info dict from CatalogService + admin_level: Admin level for this collection (optional) + area_level_names: Dict mapping area_level to type name (optional) + + Returns: + dict: OGC CollectionInfo + """ + report_code = report["id"] + ogc_base = f"{self.base_url}/gis/ogc" + + # Build collection ID and title with admin level suffix + if admin_level is not None: + collection_id = f"{report_code}_adm{admin_level}" + level_name = (area_level_names or {}).get(admin_level, f"Level {admin_level}") + title = f"{report['name']} ({level_name})" + else: + collection_id = report_code + title = report["name"] + + collection = { + "id": collection_id, + "title": title, + "description": report.get("description"), + "itemType": "feature", + "crs": [ + "http://www.opengis.net/def/crs/OGC/1.3/CRS84", + ], + "links": [ + { + "href": f"{ogc_base}/collections/{collection_id}", + "rel": "self", + "type": "application/json", + "title": "Collection metadata", + }, + { + "href": f"{ogc_base}/collections/{collection_id}/items", + "rel": "items", + "type": "application/geo+json", + "title": "Feature items", + }, + { + "href": f"{ogc_base}/collections/{collection_id}/qml", + "rel": "describedby", + "type": "text/xml", + "title": "QGIS style file (QML)", + }, + ], + } + + # Build extent with spatial bbox and optional temporal info + extent = {} + + spatial_bbox = self._compute_report_bbox(report_code) + if spatial_bbox: + extent["spatial"] = {"bbox": [spatial_bbox], "crs": "http://www.opengis.net/def/crs/OGC/1.3/CRS84"} + + if report.get("last_refresh"): + extent["temporal"] = {"interval": [[report["last_refresh"], None]]} + + if extent: + collection["extent"] = extent + + return collection + + def _data_layer_to_collection(self, layer): + """Convert a data layer to an OGC collection. 
+ + Args: + layer: Data layer info dict from CatalogService + + Returns: + dict: OGC CollectionInfo + """ + collection_id = f"layer_{layer['id']}" + ogc_base = f"{self.base_url}/gis/ogc" + + links = [ + { + "href": f"{ogc_base}/collections/{collection_id}", + "rel": "self", + "type": "application/json", + "title": "Collection metadata", + }, + { + "href": f"{ogc_base}/collections/{collection_id}/items", + "rel": "items", + "type": "application/geo+json", + "title": "Feature items", + }, + ] + + # Add QML link for report-driven data layers + if layer.get("report_code"): + links.append( + { + "href": f"{ogc_base}/collections/{collection_id}/qml", + "rel": "describedby", + "type": "text/xml", + "title": "QGIS style file (QML)", + } + ) + + return { + "id": collection_id, + "title": layer["name"], + "description": f"Data layer from {layer.get('source_model', 'unknown')}", + "itemType": "feature", + "crs": ["http://www.opengis.net/def/crs/OGC/1.3/CRS84"], + "links": links, + } + + def _compute_report_bbox(self, report_code): + """Compute spatial bounding box for a report's areas via PostGIS. + + Args: + report_code: Report code (collection ID) + + Returns: + list: [west, south, east, north] or None if no geometry + """ + try: + self.env.cr.execute( + """ + SELECT + ST_XMin(ST_Extent(a.geo_polygon)), + ST_YMin(ST_Extent(a.geo_polygon)), + ST_XMax(ST_Extent(a.geo_polygon)), + ST_YMax(ST_Extent(a.geo_polygon)) + FROM spp_gis_report_data d + JOIN spp_area a ON a.id = d.area_id + JOIN spp_gis_report r ON r.id = d.report_id + WHERE r.code = %s AND a.geo_polygon IS NOT NULL + """, + (report_code,), + ) + row = self.env.cr.fetchone() + if row and row[0] is not None: + return [row[0], row[1], row[2], row[3]] + except Exception as e: + _logger.warning("Failed to compute bbox for report %s: %s", report_code, e) + return None + + def _parse_collection_id(self, collection_id): + """Parse collection ID into layer type, layer ID, and admin level. 
+ + Supported formats: + - "layer_{id}" → ("layer", "{id}", None) + - "{code}_adm{N}" → ("report", "{code}", N) + - "{code}" → ("report", "{code}", None) + + Args: + collection_id: Collection identifier + + Returns: + tuple: (layer_type, layer_id, admin_level) + """ + if collection_id.startswith("layer_"): + return "layer", collection_id[6:], None + + match = re.match(r"^(.+)_adm(\d+)$", collection_id) + if match: + return "report", match.group(1), int(match.group(2)) + + return "report", collection_id, None + + def _get_report_base_level(self, report_code): + """Look up the base_area_level for a report by code. + + Used as default admin_level when a bare code is provided + (no _admN suffix) for backward compatibility. + + Args: + report_code: Report code + + Returns: + int: base_area_level or None if report not found + """ + report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) + if report: + return report.base_area_level + return None diff --git a/spp_api_v2_gis/services/qml_template_service.py b/spp_api_v2_gis/services/qml_template_service.py new file mode 100644 index 00000000..8bdd39ec --- /dev/null +++ b/spp_api_v2_gis/services/qml_template_service.py @@ -0,0 +1,394 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""QML Template Service for QGIS styling. + +Generates QML (QGIS Layer Definition) files from templates, injecting +values from GIS report configurations such as color schemes, thresholds, +and opacity settings. 
+""" + +import logging +import os + +_logger = logging.getLogger(__name__) + + +class QMLTemplateService: + """Service for generating QML style files from templates.""" + + TEMPLATE_DIR = os.path.join( + os.path.dirname(os.path.dirname(__file__)), + "data", + "qml_templates", + ) + + # Map geometry types to template files + TEMPLATE_MAP = { + "polygon": "graduated_polygon.qml", + "point": "point_basic.qml", + "cluster": "point_cluster.qml", + "heatmap": "point_basic.qml", # Fallback to basic point + } + + def __init__(self, env): + """Initialize service with Odoo environment. + + Args: + env: Odoo environment for database access + """ + self.env = env + + def generate_qml( + self, + report_id: int, + geometry_type: str, + field_name: str | None = None, + opacity: float = 0.7, + admin_level: int | None = None, + ) -> str: + """Generate QML XML for a GIS report. + + Args: + report_id: ID of spp.gis.report record + geometry_type: Type of geometry (polygon, point, cluster, heatmap) + field_name: Field name to symbolize (for choropleth) + opacity: Layer opacity (0.0 to 1.0) + admin_level: Admin level to adapt thresholds for (optional) + + Returns: + QML XML string + + Raises: + ValueError: If report not found or template missing + """ + # Get report record + report = self.env["spp.gis.report"].browse(report_id) + if not report.exists(): + raise ValueError(f"Report with ID {report_id} not found") + + # Load template + template_name = self.TEMPLATE_MAP.get(geometry_type, "point_basic.qml") + template_path = os.path.join(self.TEMPLATE_DIR, template_name) + + if not os.path.exists(template_path): + _logger.warning("QML template not found: %s", template_path) + raise ValueError(f"QML template not found: {template_name}") + + with open(template_path) as f: + template = f.read() + + # Generate QML based on geometry type + if geometry_type == "polygon": + return self._generate_graduated_polygon(template, report, field_name, opacity, admin_level=admin_level) + elif geometry_type 
in ("point", "cluster", "heatmap"): + return self._generate_point(template, report, opacity) + else: + raise ValueError(f"Unsupported geometry type: {geometry_type}") + + def _generate_graduated_polygon( + self, + template: str, + report, + field_name: str | None, + opacity: float, + admin_level: int | None = None, + ) -> str: + """Generate graduated polygon (choropleth) QML. + + When admin_level is provided, thresholds are adapted to the + data range at that level so features show meaningful color + variation even when global thresholds would compress them + into a single bucket. + + Args: + template: Template string + report: GIS report record + field_name: Field to symbolize + opacity: Layer opacity + admin_level: Admin level to compute per-level thresholds (optional) + + Returns: + Populated QML string + """ + # Use configured field or default + if not field_name: + field_name = "normalized_value" + + # Get color scheme + color_scheme = report.color_scheme_id + if not color_scheme: + # Fallback to default scheme + color_scheme = self.env["spp.gis.color.scheme"].get_default_scheme() + + # Get thresholds + thresholds = report.threshold_ids.sorted("sequence") + if not thresholds: + _logger.warning("No thresholds defined for report: %s", report.name) + # Generate default single-class QML + return self._generate_default_polygon(template, field_name, opacity) + + # When an admin level is specified, compute per-level thresholds + # to avoid all features falling into a single global bucket + if admin_level is not None: + level_thresholds = self._compute_level_thresholds(report, thresholds, field_name, admin_level) + if level_thresholds: + return self._render_graduated_polygon(template, level_thresholds, field_name, opacity) + + # Use global thresholds + threshold_defs = [] + for threshold in thresholds: + threshold_defs.append( + { + "lower": threshold.min_value if threshold.min_value is not None else 0, + "upper": threshold.max_value if threshold.max_value is not None 
else 999999, + "label": threshold.label or f"Class {len(threshold_defs) + 1}", + "color": threshold.color or "#808080", + } + ) + + return self._render_graduated_polygon(template, threshold_defs, field_name, opacity) + + def _compute_level_thresholds(self, report, global_thresholds, field_name, admin_level): + """Compute thresholds adapted to a specific admin level's data range. + + Uses equal-interval breaks across the level's actual data range, + preserving the global threshold colors and labels. + + Args: + report: GIS report record + global_thresholds: Report's threshold records + field_name: Field being symbolized + admin_level: Admin level to compute thresholds for + + Returns: + list[dict]: Per-level threshold definitions, or None to fall back to global + """ + # Query the data range at this admin level + data = ( + self.env["spp.gis.report.data"] + .sudo() + .search( + [ + ("report_id", "=", report.id), + ("area_level", "=", admin_level), + (field_name, "!=", False), + ] + ) + ) + + if not data: + return None + + values = [getattr(d, field_name) for d in data if getattr(d, field_name) is not None] + if not values: + return None + + min_val = min(values) + max_val = max(values) + + # If all values are the same, no differentiation is possible + if min_val == max_val: + return None + + # Check if global thresholds already provide good differentiation + # by counting how many distinct buckets the level's data falls into + sorted_thresholds = global_thresholds.sorted("sequence") + buckets_used = set() + for val in values: + for idx, threshold in enumerate(sorted_thresholds): + lower = threshold.min_value if threshold.min_value is not None else float("-inf") + upper = threshold.max_value if threshold.max_value is not None else float("inf") + if lower <= val <= upper: + buckets_used.add(idx) + break + # If data uses 2+ buckets, global thresholds work well enough + if len(buckets_used) >= 2: + return None + + # Recompute: equal-interval breaks using global 
colors/labels + number_of_classes = len(sorted_thresholds) + interval = (max_val - min_val) / number_of_classes + + # Add small padding to ensure min and max values are included + padded_min = min_val - (interval * 0.001) + padded_max = max_val + (interval * 0.001) + interval = (padded_max - padded_min) / number_of_classes + + level_thresholds = [] + for idx, threshold in enumerate(sorted_thresholds): + lower = padded_min + (idx * interval) + upper = padded_min + ((idx + 1) * interval) + level_thresholds.append( + { + "lower": round(lower, 6), + "upper": round(upper, 6), + "label": threshold.label or f"Class {idx + 1}", + "color": threshold.color or "#808080", + } + ) + + return level_thresholds + + def _render_graduated_polygon(self, template, threshold_defs, field_name, opacity): + """Render a graduated polygon QML from threshold definitions. + + Args: + template: Template string + threshold_defs: List of threshold dicts with lower, upper, label, color + field_name: Field to symbolize + opacity: Layer opacity + + Returns: + Populated QML string + """ + ranges_xml = [] + symbols_xml = [] + + for idx, threshold in enumerate(threshold_defs): + lower = threshold["lower"] + upper = threshold["upper"] + label = threshold["label"] + color_hex = threshold["color"] + + color_rgb = self._hex_to_rgb(color_hex) + + ranges_xml.append( + f' ' + ) + + symbols_xml.append( + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f" \n" + f" " + ) + + qml = template.replace("{{FIELD_NAME}}", field_name) + qml = qml.replace("{{RANGES}}", "\n".join(ranges_xml)) + qml = qml.replace("{{SYMBOLS}}", "\n".join(symbols_xml)) + qml = qml.replace("{{OPACITY}}", str(opacity)) + + return qml + + def _generate_point( + self, + template: str, + report, + opacity: float, + ) -> str: + """Generate point marker QML. 
+ + Args: + template: Template string + report: GIS report record + opacity: Layer opacity + + Returns: + Populated QML string + """ + # Get color from color scheme + color_scheme = report.color_scheme_id + if not color_scheme: + color_scheme = self.env["spp.gis.color.scheme"].get_default_scheme() + + # Use first color from scheme + colors = color_scheme.get_colors_list() + color_hex = colors[0] if colors else "#3498db" + color_rgb = self._hex_to_rgb(color_hex) + + # Replace placeholders + qml = template.replace("{{COLOR}}", color_rgb) + qml = qml.replace("{{OPACITY}}", str(opacity)) + + return qml + + def _generate_default_polygon( + self, + template: str, + field_name: str, + opacity: float, + ) -> str: + """Generate default single-class polygon QML. + + Args: + template: Template string + field_name: Field name + opacity: Layer opacity + + Returns: + Populated QML string + """ + # Single default range + ranges_xml = ' ' + + # Single default symbol (blue) + symbols_xml = ( + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f' \n' + f" \n" + f" " + ) + + qml = template.replace("{{FIELD_NAME}}", field_name) + qml = qml.replace("{{RANGES}}", ranges_xml) + qml = qml.replace("{{SYMBOLS}}", symbols_xml) + qml = qml.replace("{{OPACITY}}", str(opacity)) + + return qml + + @staticmethod + def _hex_to_rgb(hex_color: str) -> str: + """Convert hex color to RGB string for QML. + + Args: + hex_color: Hex color string (e.g., '#3498db') + + Returns: + RGB string for QML (e.g., '52,152,219,255') + """ + hex_color = hex_color.lstrip("#") + try: + r = int(hex_color[0:2], 16) + g = int(hex_color[2:4], 16) + b = int(hex_color[4:6], 16) + return f"{r},{g},{b},255" + except (ValueError, IndexError): + # Fallback to gray + return "128,128,128,255" + + @staticmethod + def _escape_xml(text: str) -> str: + """Escape special XML characters. 
+ + Args: + text: Text to escape + + Returns: + Escaped text + """ + if not text: + return "" + return ( + text.replace("&", "&") + .replace("<", "<") + .replace(">", ">") + .replace('"', """) + .replace("'", "'") + ) diff --git a/spp_api_v2_gis/services/spatial_query_service.py b/spp_api_v2_gis/services/spatial_query_service.py new file mode 100644 index 00000000..1caf9002 --- /dev/null +++ b/spp_api_v2_gis/services/spatial_query_service.py @@ -0,0 +1,748 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Service for executing PostGIS spatial queries.""" + +import json +import logging + +from odoo.addons.spp_aggregation.services import build_explicit_scope + +_logger = logging.getLogger(__name__) + + +class SpatialQueryService: + """Service for PostGIS spatial queries. + + This service handles spatial queries for registrants within a polygon. + It supports two query methods: + - coordinates: Direct spatial query when registrants have coordinates (preferred) + - area_fallback: Match via area_id when coordinates are not available + + Statistics computation: + - Delegates to AggregationService (spp_aggregation) + - AggregationService provides unified computation with k-anonymity protection + """ + + def __init__(self, env): + """Initialize spatial query service. + + Args: + env: Odoo environment + """ + self.env = env + + def query_statistics_batch(self, geometries, filters=None, variables=None): + """Execute spatial query for multiple geometries. + + Queries each geometry individually and computes an aggregate summary. 
+ + Args: + geometries: List of dicts with 'id' and 'geometry' keys + filters: Additional filters for registrants (dict) + variables: List of statistic names to compute + + Returns: + dict: Batch results with per-geometry results and summary + - results: List of per-geometry result dicts with metadata + - summary: Aggregate summary across all geometries with metadata + """ + results = [] + all_registrant_ids = set() + + for item in geometries: + geometry_id = item["id"] + geometry = item["geometry"] + + try: + result = self.query_statistics( + geometry=geometry, + filters=filters, + variables=variables, + ) + # Collect registrant IDs for deduplication in summary + registrant_ids = result.pop("registrant_ids", []) + all_registrant_ids.update(registrant_ids) + + results.append( + { + "id": geometry_id, + "total_count": result["total_count"], + "query_method": result["query_method"], + "areas_matched": result["areas_matched"], + "statistics": result["statistics"], + "access_level": result.get("access_level"), + "from_cache": result.get("from_cache", False), + "computed_at": result.get("computed_at"), + } + ) + except Exception as e: + _logger.warning("Batch query failed for geometry '%s': %s", geometry_id, e) + results.append( + { + "id": geometry_id, + "total_count": 0, + "query_method": "error", + "areas_matched": 0, + "statistics": {}, + "access_level": None, + "from_cache": False, + "computed_at": None, + } + ) + + # Compute summary by aggregating unique registrants with metadata + summary_stats_with_metadata = {"statistics": {}} + if all_registrant_ids: + summary_stats_with_metadata = self._compute_statistics(list(all_registrant_ids), variables or []) + + summary = { + "total_count": len(all_registrant_ids), + "geometries_queried": len(geometries), + "statistics": summary_stats_with_metadata.get("statistics", {}), + "access_level": summary_stats_with_metadata.get("access_level"), + "from_cache": summary_stats_with_metadata.get("from_cache", False), + 
"computed_at": summary_stats_with_metadata.get("computed_at"), + } + + return { + "results": results, + "summary": summary, + } + + def query_statistics(self, geometry, filters=None, variables=None): + """Execute spatial query for statistics within polygon. + + Args: + geometry: GeoJSON geometry (Polygon or MultiPolygon) + filters: Additional filters for registrants (dict) + variables: List of statistic names to compute + + Returns: + dict: Query results with statistics and metadata + - total_count: Number of registrants in polygon + - query_method: "coordinates" or "area_fallback" + - areas_matched: Number of areas intersecting polygon + - statistics: Computed statistics + - access_level: Access level applied to statistics + - from_cache: Whether statistics were served from cache + - computed_at: ISO 8601 timestamp when statistics were computed + """ + filters = filters or {} + variables = variables or [] + + # Convert GeoJSON to PostGIS-compatible format + geometry_json = json.dumps(geometry) + + # Try coordinate-based query first (preferred method) + try: + result = self._query_by_coordinates(geometry_json, filters) + if result["total_count"] > 0: + _logger.info( + "Spatial query using coordinates: %s registrants found", + result["total_count"], + ) + # Compute statistics for the matched registrants with metadata + stats_with_metadata = self._compute_statistics(result["registrant_ids"], variables) + result.update(stats_with_metadata) + return result + except Exception as e: + _logger.warning( + "Coordinate-based query failed: %s, falling back to area-based query", + e, + ) + + # Fall back to area-based query + result = self._query_by_area(geometry_json, filters) + _logger.info( + f"Spatial query using area fallback: {result['total_count']} registrants in {result['areas_matched']} areas" + ) + + # Compute statistics for the matched registrants with metadata + stats_with_metadata = self._compute_statistics(result["registrant_ids"], variables) + 
result.update(stats_with_metadata) + + return result + + def _query_by_coordinates(self, geometry_json, filters): + """Query registrants by coordinates using ST_Intersects. + + This is the preferred method when registrants have coordinate data. + + Args: + geometry_json: GeoJSON geometry as JSON string + filters: Additional filters for registrants + + Returns: + dict: Query results with registrant_ids + """ + # Build WHERE clause from filters + where_clauses = ["p.is_registrant = true"] + params = [geometry_json] + + if filters.get("is_group") is not None: + where_clauses.append("p.is_group = %s") + params.append(filters["is_group"]) + + if filters.get("disabled") is not None: + if filters["disabled"]: + where_clauses.append("p.disabled IS NOT NULL") + else: + where_clauses.append("p.disabled IS NULL") + + where_clause = " AND ".join(where_clauses) + + # Query using ST_Intersects with coordinates + # Note: This assumes res.partner has a 'coordinates' GeoPointField + # For now, we'll check if the field exists, otherwise return empty result + Partner = self.env["res.partner"] + if not hasattr(Partner, "_fields") or "coordinates" not in Partner._fields: + # Coordinates field doesn't exist, can't use this method + raise ValueError("coordinates field not available on res.partner") + + query = f""" + SELECT p.id + FROM res_partner p + WHERE {where_clause} + AND p.coordinates IS NOT NULL + AND ST_Intersects( + p.coordinates, + ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326) + ) + """ + + # Add geometry parameter at the beginning + params = [geometry_json] + params[1:] + + self.env.cr.execute(query, params) + registrant_ids = [row[0] for row in self.env.cr.fetchall()] + + return { + "total_count": len(registrant_ids), + "query_method": "coordinates", + "areas_matched": 0, # Not applicable for coordinate-based query + "registrant_ids": registrant_ids, + } + + def _query_by_area(self, geometry_json, filters): + """Query registrants by area intersection (fallback method). 
+ + This method finds areas that intersect the query polygon, + then returns all registrants in those areas. Individuals who + lack their own area_id are included if their group (household) + is in a matched area. + + Args: + geometry_json: GeoJSON geometry as JSON string + filters: Additional filters for registrants + + Returns: + dict: Query results with registrant_ids and areas_matched + """ + # First, find areas that intersect the polygon + areas_query = """ + SELECT a.id + FROM spp_area a + WHERE a.geo_polygon IS NOT NULL + AND ST_Intersects( + a.geo_polygon, + ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326) + ) + """ + + self.env.cr.execute(areas_query, [geometry_json]) + area_ids = [row[0] for row in self.env.cr.fetchall()] + + if not area_ids: + return { + "total_count": 0, + "query_method": "area_fallback", + "areas_matched": 0, + "registrant_ids": [], + } + + area_tuple = tuple(area_ids) + + # Build filter clauses shared by both queries + extra_clauses = [] + extra_params = [] + + if filters.get("is_group") is not None: + extra_clauses.append("p.is_group = %s") + extra_params.append(filters["is_group"]) + + if filters.get("disabled") is not None: + if filters["disabled"]: + extra_clauses.append("p.disabled IS NOT NULL") + else: + extra_clauses.append("p.disabled IS NULL") + + extra_where = (" AND " + " AND ".join(extra_clauses)) if extra_clauses else "" + + # Query registrants: those directly in matched areas PLUS + # individuals whose group (household) is in a matched area. + # Individuals often lack area_id; they inherit it from their group. 
+ registrants_query = f""" + SELECT DISTINCT p.id + FROM res_partner p + WHERE p.is_registrant = true + AND ( + p.area_id IN %s + OR ( + p.is_group = false + AND p.area_id IS NULL + AND EXISTS ( + SELECT 1 + FROM spp_group_membership gm + JOIN res_partner g ON g.id = gm.\"group\" + WHERE gm.individual = p.id + AND gm.is_ended = false + AND g.area_id IN %s + ) + ) + ){extra_where} + """ + + params = [area_tuple, area_tuple] + extra_params + self.env.cr.execute(registrants_query, params) + registrant_ids = [row[0] for row in self.env.cr.fetchall()] + + return { + "total_count": len(registrant_ids), + "query_method": "area_fallback", + "areas_matched": len(area_ids), + "registrant_ids": registrant_ids, + } + + def _compute_statistics(self, registrant_ids, variables): + """Compute statistics using the unified aggregation engine only. + + Args: + registrant_ids: List of registrant IDs + variables: List of statistic names to compute + + Returns: + dict: Statistics with metadata (statistics, access_level, from_cache, computed_at) + """ + if not registrant_ids: + return { + "statistics": self._get_empty_statistics(), + "access_level": None, + "from_cache": False, + "computed_at": None, + } + + if "spp.aggregation.service" not in self.env: + raise RuntimeError("spp.aggregation.service is required for GIS statistics queries.") + + return self._compute_via_aggregation_service(registrant_ids, variables) + + def _compute_via_aggregation_service(self, registrant_ids, variables): + """Compute statistics using AggregationService. + + Delegates to the unified aggregation service for statistics computation + with built-in privacy protection. 
+ + Args: + registrant_ids: List of registrant IDs + variables: List of statistic names to compute (or None for GIS defaults) + + Returns: + dict: Statistics with metadata (statistics, access_level, from_cache, computed_at) + """ + if not registrant_ids: + return {"statistics": {}} + + # Create an explicit scope for the registrant IDs + scope = build_explicit_scope(registrant_ids) + + # Determine statistics to compute + statistics_to_compute = variables + if not statistics_to_compute: + # Use GIS-published statistics + Statistic = self.env["spp.statistic"].sudo() + gis_stats = Statistic.get_published_for_context("gis") + statistics_to_compute = [stat.name for stat in gis_stats] if gis_stats else None + + if not statistics_to_compute: + return { + "statistics": {}, + "access_level": None, + "from_cache": False, + "computed_at": None, + } + + # Call AggregationService (no sudo - let service determine access level from calling user) + aggregation_service = self.env["spp.aggregation.service"] + result = aggregation_service.compute_aggregation( + scope=scope, + statistics=statistics_to_compute, + context="gis", + use_cache=False, # Spatial queries are dynamic, don't cache + ) + + # Convert AggregationService result to expected format with metadata + return self._convert_aggregation_result(result, registrant_ids) + + def _convert_aggregation_result(self, agg_result, registrant_ids=None): + """Convert AggregationService result to spatial query format. + + Args: + agg_result: Result from AggregationService + registrant_ids: Backward-compatible arg; not used. 
+ + Returns: + dict: Statistics and metadata (statistics, access_level, from_cache, computed_at) + """ + # Get the statistics dict from aggregation result + statistics = agg_result.get("statistics", {}) + + # Organize by category (if available) + result = {} + grouped_stats = {} + + Statistic = self.env["spp.statistic"].sudo() + statistic_by_name = {stat.name: stat for stat in Statistic.search([("name", "in", list(statistics.keys()))])} + + for stat_name, stat_data in statistics.items(): + stat = statistic_by_name.get(stat_name) + + if stat: + config = stat.get_context_config("gis") + stat_key = stat.name + stat_label = config.get("label", stat.label) + stat_group = config.get("group") or "general" + stat_format = config.get("format", stat.format) + else: + # Keep unknown/built-in stats visible with generic metadata. + stat_key = stat_name + stat_label = stat_name.replace("_", " ").title() + stat_group = "general" + stat_format = "count" if stat_name == "count" else "number" + + # Extract value and suppressed flag + display_value = stat_data.get("value") + is_suppressed = stat_data.get("suppressed", False) + + # Format the result + stat_entry = { + "label": stat_label, + "value": display_value, + "format": stat_format, + "suppressed": is_suppressed, + } + + # Store both flat and grouped + result[stat_key] = display_value + + # Also organize by group for UI + if stat_group not in grouped_stats: + grouped_stats[stat_group] = {} + grouped_stats[stat_group][stat_key] = stat_entry + + # Add grouped organization to result + if grouped_stats: + result["_grouped"] = grouped_stats + + # Return statistics with metadata + return { + "statistics": result, + "access_level": agg_result.get("access_level"), + "from_cache": agg_result.get("from_cache", False), + "computed_at": agg_result.get("computed_at"), + } + + def query_proximity(self, reference_points, radius_km, relation="within", filters=None, variables=None): + """Query registrants by proximity to reference points. 
+ + Uses a temp table with pre-buffered geometries and ST_Intersects + against the indexed res_partner.coordinates column. + + Args: + reference_points: List of dicts with 'longitude' and 'latitude' keys + radius_km: Search radius in kilometers + relation: 'within' (inside radius) or 'beyond' (outside radius) + filters: Additional filters for registrants (dict) + variables: List of statistic names to compute + + Returns: + dict: Query results with statistics and metadata + + Raises: + ValueError: If inputs are invalid + """ + # Validate inputs + if not reference_points: + raise ValueError("reference_points must not be empty") + if radius_km <= 0: + raise ValueError("radius_km must be positive") + if relation not in ("within", "beyond"): + raise ValueError(f"relation must be 'within' or 'beyond', got '{relation}'") + + filters = filters or {} + variables = variables or [] + radius_meters = radius_km * 1000 + + # Try coordinate-based query first + try: + result = self._proximity_by_coordinates(reference_points, radius_meters, relation, filters) + if result["total_count"] > 0: + _logger.info( + "Proximity query (%s, %.1f km) using coordinates: %s registrants found", + relation, + radius_km, + result["total_count"], + ) + registrant_ids = result["registrant_ids"] + stats_with_metadata = self._compute_statistics(registrant_ids, variables) + result.update(stats_with_metadata) + result["reference_points_count"] = len(reference_points) + result["radius_km"] = radius_km + result["relation"] = relation + return result + except Exception as e: + _logger.warning( + "Coordinate-based proximity query failed: %s, falling back to area-based", + e, + ) + + # Fall back to area-based query + result = self._proximity_by_area(reference_points, radius_meters, relation, filters) + _logger.info( + "Proximity query (%s, %.1f km) using area fallback: %s registrants in %s areas", + relation, + radius_km, + result["total_count"], + result["areas_matched"], + ) + registrant_ids = 
result["registrant_ids"] + stats_with_metadata = self._compute_statistics(registrant_ids, variables) + result.update(stats_with_metadata) + result["reference_points_count"] = len(reference_points) + result["radius_km"] = radius_km + result["relation"] = relation + return result + + def _create_proximity_temp_table(self, reference_points, radius_meters): + """Create temp table with pre-buffered reference point geometries. + + Buffers are computed on the reference points (small set) in geography + for meter-accurate radius, then converted back to geometry so that + ST_Intersects can use the GiST index on res_partner.coordinates. + + Args: + reference_points: List of dicts with 'longitude' and 'latitude' + radius_meters: Buffer radius in meters + """ + # Drop any leftover temp table from a previous call in the same transaction + self.env.cr.execute("DROP TABLE IF EXISTS _prox_ref_points") + + self.env.cr.execute( + """ + CREATE TEMPORARY TABLE _prox_ref_points ( + buffer_geom geometry + ) ON COMMIT DROP + """ + ) + + # Extract lon/lat arrays for bulk insert via unnest + longitudes = [pt["longitude"] for pt in reference_points] + latitudes = [pt["latitude"] for pt in reference_points] + + self.env.cr.execute( + """ + INSERT INTO _prox_ref_points (buffer_geom) + SELECT + ST_Buffer( + ST_SetSRID(ST_MakePoint(lon, lat), 4326)::geography, + %(radius_m)s + )::geometry + FROM unnest(%(lons)s::float[], %(lats)s::float[]) AS t(lon, lat) + """, + { + "radius_m": radius_meters, + "lons": longitudes, + "lats": latitudes, + }, + ) + + # Create spatial index on buffered geometries for efficient intersection + self.env.cr.execute("CREATE INDEX ON _prox_ref_points USING GIST (buffer_geom)") + + def _build_filter_clauses(self, filters): + """Build SQL WHERE clauses and params from filter dict. 
+ + Args: + filters: Dict with optional 'is_group' and 'disabled' keys + + Returns: + tuple: (extra_where_sql, extra_params) + """ + extra_clauses = [] + extra_params = [] + + if filters.get("is_group") is not None: + extra_clauses.append("p.is_group = %s") + extra_params.append(filters["is_group"]) + + if filters.get("disabled") is not None: + if filters["disabled"]: + extra_clauses.append("p.disabled IS NOT NULL") + else: + extra_clauses.append("p.disabled IS NULL") + + extra_where = (" AND " + " AND ".join(extra_clauses)) if extra_clauses else "" + return extra_where, extra_params + + def _proximity_by_coordinates(self, reference_points, radius_meters, relation, filters): + """Query registrants by coordinate proximity to reference points. + + Args: + reference_points: List of dicts with 'longitude' and 'latitude' + radius_meters: Search radius in meters + relation: 'within' or 'beyond' + filters: Additional filters for registrants + + Returns: + dict: Query results with registrant_ids + """ + Partner = self.env["res.partner"] + if not hasattr(Partner, "_fields") or "coordinates" not in Partner._fields: + raise ValueError("coordinates field not available on res.partner") + + self._create_proximity_temp_table(reference_points, radius_meters) + + extra_where, extra_params = self._build_filter_clauses(filters) + + if relation == "within": + # Find registrants whose coordinates intersect any buffer + query = f""" + SELECT p.id + FROM res_partner p + WHERE p.is_registrant = true + AND p.coordinates IS NOT NULL + AND EXISTS ( + SELECT 1 FROM _prox_ref_points r + WHERE ST_Intersects(p.coordinates, r.buffer_geom) + ){extra_where} + """ + else: + # "beyond": find all registrants with coords, minus those within + query = f""" + SELECT p.id + FROM res_partner p + WHERE p.is_registrant = true + AND p.coordinates IS NOT NULL + AND p.id NOT IN ( + SELECT p2.id + FROM res_partner p2 + WHERE p2.is_registrant = true + AND p2.coordinates IS NOT NULL + AND EXISTS ( + SELECT 1 FROM 
_prox_ref_points r + WHERE ST_Intersects(p2.coordinates, r.buffer_geom) + ) + ){extra_where} + """ + + self.env.cr.execute(query, extra_params) + registrant_ids = [row[0] for row in self.env.cr.fetchall()] + + return { + "total_count": len(registrant_ids), + "query_method": "coordinates", + "areas_matched": 0, + "registrant_ids": registrant_ids, + } + + def _proximity_by_area(self, reference_points, radius_meters, relation, filters): + """Query registrants by area proximity (fallback when coordinates unavailable). + + Uses ST_Intersects between area polygons and buffered reference points + (NOT centroid, which is geometrically incorrect for large areas). + + Args: + reference_points: List of dicts with 'longitude' and 'latitude' + radius_meters: Search radius in meters + relation: 'within' or 'beyond' + filters: Additional filters for registrants + + Returns: + dict: Query results with registrant_ids and areas_matched + """ + self._create_proximity_temp_table(reference_points, radius_meters) + + # Find areas whose polygon intersects (or doesn't) any reference buffer + if relation == "within": + areas_query = """ + SELECT a.id + FROM spp_area a + WHERE a.geo_polygon IS NOT NULL + AND EXISTS ( + SELECT 1 FROM _prox_ref_points r + WHERE ST_Intersects(a.geo_polygon, r.buffer_geom) + ) + """ + else: + areas_query = """ + SELECT a.id + FROM spp_area a + WHERE a.geo_polygon IS NOT NULL + AND NOT EXISTS ( + SELECT 1 FROM _prox_ref_points r + WHERE ST_Intersects(a.geo_polygon, r.buffer_geom) + ) + """ + + self.env.cr.execute(areas_query) + area_ids = [row[0] for row in self.env.cr.fetchall()] + + if not area_ids: + return { + "total_count": 0, + "query_method": "area_fallback", + "areas_matched": 0, + "registrant_ids": [], + } + + area_tuple = tuple(area_ids) + extra_where, extra_params = self._build_filter_clauses(filters) + + # Reuse the same registrant lookup as _query_by_area (includes group membership) + registrants_query = f""" + SELECT DISTINCT p.id + FROM 
res_partner p + WHERE p.is_registrant = true + AND ( + p.area_id IN %s + OR ( + p.is_group = false + AND p.area_id IS NULL + AND EXISTS ( + SELECT 1 + FROM spp_group_membership gm + JOIN res_partner g ON g.id = gm.\"group\" + WHERE gm.individual = p.id + AND gm.is_ended = false + AND g.area_id IN %s + ) + ) + ){extra_where} + """ + + params = [area_tuple, area_tuple] + extra_params + self.env.cr.execute(registrants_query, params) + registrant_ids = [row[0] for row in self.env.cr.fetchall()] + + return { + "total_count": len(registrant_ids), + "query_method": "area_fallback", + "areas_matched": len(area_ids), + "registrant_ids": registrant_ids, + } + + def _get_empty_statistics(self): + """Return empty statistics structure. + + Returns: + dict: Empty statistics + """ + return {} diff --git a/spp_api_v2_gis/static/description/index.html b/spp_api_v2_gis/static/description/index.html new file mode 100644 index 00000000..5f665740 --- /dev/null +++ b/spp_api_v2_gis/static/description/index.html @@ -0,0 +1,472 @@ + + + + + +README.rst + + + +
+ + + +Odoo Community Association + +
+

OpenSPP GIS API

+ +

Alpha License: LGPL-3 OpenSPP/openspp-modules

+

REST API for QGIS plugin integration, providing GeoJSON endpoints, +spatial queries, and geofence management.

+
+

Key Features

+
    +
  • Layer Catalog: List available GIS layers and reports
  • +
  • GeoJSON Export: Get pre-aggregated layer data for QGIS
  • +
  • QML Styling: Fetch QGIS style files for consistent visualization
  • +
  • Spatial Queries: Query registrant statistics within arbitrary +polygons using PostGIS
  • +
  • Geofence Management: Save and manage areas of interest
  • +
+
+
+

Architecture

+

Follows a thin-client architecture where QGIS displays data and OpenSPP +performs all computation:

+
    +
  • All spatial queries executed in PostGIS for performance
  • +
  • Pre-aggregated data returned to minimize data transfer
  • +
  • Configuration-driven styling using QML templates
  • +
  • OAuth 2.0 authentication with scope-based access control
  • +
+
+
+

API Endpoints

+
    +
  • GET /gis/catalog - List available layers and reports
  • +
  • GET /gis/layers/{id} - Get layer as GeoJSON FeatureCollection
  • +
  • GET /gis/layers/{id}/qml - Get QGIS style file
  • +
  • POST /gis/query/statistics - Query statistics for polygon
  • +
  • POST /gis/geofences - Create geofence
  • +
  • GET /gis/geofences - List geofences
  • +
  • GET /gis/geofences/{id} - Get single geofence as GeoJSON
  • +
  • DELETE /gis/geofences/{id} - Archive geofence
  • +
  • GET /gis/export/geopackage - Export layers for offline use
  • +
+
+
+

Required Scopes

+
    +
  • gis:read - View layers and statistics
  • +
  • gis:geofence - Create and manage geofences
  • +
+
+
+

Dependencies

+
    +
  • spp_api_v2 - FastAPI infrastructure
  • +
  • spp_gis - PostGIS integration
  • +
  • spp_gis_report - Report configuration
  • +
  • spp_area - Administrative area data
  • +
+
+

Important

+

This is an alpha version, the data model and design can change at any time without warning. +Only for development or testing purposes; do not use in production. +More details on development status

+
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing detailed and welcome +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+ +
+
+

Authors

+
    +
  • OpenSPP.org
  • +
+
+
+

Maintainers

+

Current maintainers:

+

jeremi gonzalesedwin1123 reichie020212

+

This module is part of the OpenSPP/openspp-modules project on GitHub.

+

You are welcome to contribute.

+
+
+
+ + diff --git a/spp_api_v2_gis/tests/__init__.py b/spp_api_v2_gis/tests/__init__.py new file mode 100644 index 00000000..06c22da0 --- /dev/null +++ b/spp_api_v2_gis/tests/__init__.py @@ -0,0 +1,12 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +from . import test_catalog_service +from . import test_export_service +from . import test_geofence_model +from . import test_layers_service +from . import test_ogc_features +from . import test_ogc_http +from . import test_qml_template_service +from . import test_spatial_query_service +from . import test_statistics_endpoint +from . import test_batch_query +from . import test_proximity_query diff --git a/spp_api_v2_gis/tests/test_batch_query.py b/spp_api_v2_gis/tests/test_batch_query.py new file mode 100644 index 00000000..19cb9fa6 --- /dev/null +++ b/spp_api_v2_gis/tests/test_batch_query.py @@ -0,0 +1,351 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Tests for batch spatial query service.""" + +from datetime import date + +from odoo.tests.common import TransactionCase + + +class TestBatchSpatialQueryService(TransactionCase): + """Test batch spatial query service functionality.""" + + @classmethod + def setUpClass(cls): + """Set up test data.""" + super().setUpClass() + + # Create two test areas with distinct registrants + cls.area_1 = cls.env["spp.area"].create( + { + "draft_name": "Batch Test District 1", + "code": "BATCH-DIST-001", + } + ) + cls.area_2 = cls.env["spp.area"].create( + { + "draft_name": "Batch Test District 2", + "code": "BATCH-DIST-002", + } + ) + + # Registrants in area 1 + cls.group_1 = cls.env["res.partner"].create( + { + "name": "Batch Household 1", + "is_registrant": True, + "is_group": True, + "area_id": cls.area_1.id, + } + ) + cls.individual_1 = cls.env["res.partner"].create( + { + "name": "Batch Individual 1", + "is_registrant": True, + "is_group": False, + "area_id": cls.area_1.id, + "birthdate": date(1990, 5, 15), + } + ) 
+ + # Registrants in area 2 + cls.group_2 = cls.env["res.partner"].create( + { + "name": "Batch Household 2", + "is_registrant": True, + "is_group": True, + "area_id": cls.area_2.id, + } + ) + cls.individual_2 = cls.env["res.partner"].create( + { + "name": "Batch Individual 2", + "is_registrant": True, + "is_group": False, + "area_id": cls.area_2.id, + "birthdate": date(2000, 8, 20), + } + ) + + def test_batch_query_returns_per_geometry_results(self): + """Test that batch query returns individual results for each geometry.""" + from ..services.spatial_query_service import SpatialQueryService + + service = SpatialQueryService(self.env) + + # Use the simple geometry format that would be sent from the plugin + geometries = [ + { + "id": "zone_1", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]], + }, + }, + { + "id": "zone_2", + "geometry": { + "type": "Polygon", + "coordinates": [[[2, 2], [3, 2], [3, 3], [2, 3], [2, 2]]], + }, + }, + ] + + result = service.query_statistics_batch( + geometries=geometries, + filters=None, + variables=None, + ) + + # Verify structure + self.assertIn("results", result) + self.assertIn("summary", result) + self.assertEqual(len(result["results"]), 2) + + # Each result should have the expected fields including metadata + for item in result["results"]: + self.assertIn("id", item) + self.assertIn("total_count", item) + self.assertIn("query_method", item) + self.assertIn("areas_matched", item) + self.assertIn("statistics", item) + self.assertIn("access_level", item) + self.assertIn("from_cache", item) + self.assertIn("computed_at", item) + + # IDs should match request + result_ids = {r["id"] for r in result["results"]} + self.assertEqual(result_ids, {"zone_1", "zone_2"}) + + def test_batch_query_summary_aggregation(self): + """Test that batch query summary aggregates results.""" + from ..services.spatial_query_service import SpatialQueryService + + service = SpatialQueryService(self.env) + + 
geometries = [ + { + "id": "area_a", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]], + }, + }, + ] + + result = service.query_statistics_batch(geometries=geometries) + + summary = result["summary"] + self.assertIn("total_count", summary) + self.assertIn("geometries_queried", summary) + self.assertIn("statistics", summary) + self.assertIn("access_level", summary) + self.assertIn("from_cache", summary) + self.assertIn("computed_at", summary) + self.assertEqual(summary["geometries_queried"], 1) + + def test_batch_query_handles_invalid_geometry(self): + """Test that batch query handles errors for individual geometries.""" + from ..services.spatial_query_service import SpatialQueryService + + service = SpatialQueryService(self.env) + + # Mix valid and invalid geometries + geometries = [ + { + "id": "valid", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]], + }, + }, + { + "id": "invalid", + "geometry": {"type": "InvalidType", "coordinates": []}, + }, + ] + + # Should not raise - individual errors are caught + result = service.query_statistics_batch(geometries=geometries) + + self.assertEqual(len(result["results"]), 2) + + # The invalid geometry should return error/empty result + invalid_result = next(r for r in result["results"] if r["id"] == "invalid") + self.assertEqual(invalid_result["total_count"], 0) + + def test_batch_query_with_variables(self): + """Test batch query passes variables to individual queries.""" + from ..services.spatial_query_service import SpatialQueryService + + service = SpatialQueryService(self.env) + + geometries = [ + { + "id": "test", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]], + }, + }, + ] + + # Passing nonexistent variables should still return a valid response + result = service.query_statistics_batch( + geometries=geometries, + variables=["nonexistent_var"], + ) + + 
self.assertIn("results", result) + self.assertEqual(len(result["results"]), 1) + + def test_batch_query_with_filters(self): + """Test batch query passes filters to individual queries.""" + from ..services.spatial_query_service import SpatialQueryService + + service = SpatialQueryService(self.env) + + geometries = [ + { + "id": "filtered", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]], + }, + }, + ] + + result = service.query_statistics_batch( + geometries=geometries, + filters={"is_group": True}, + ) + + self.assertIn("results", result) + self.assertEqual(len(result["results"]), 1) + + def test_batch_query_empty_geometries_list(self): + """Test batch query with empty geometries returns empty results.""" + from ..services.spatial_query_service import SpatialQueryService + + service = SpatialQueryService(self.env) + + result = service.query_statistics_batch(geometries=[]) + + self.assertEqual(len(result["results"]), 0) + self.assertEqual(result["summary"]["total_count"], 0) + self.assertEqual(result["summary"]["geometries_queried"], 0) + + +class TestBatchSpatialQuerySchemas(TransactionCase): + """Test batch query Pydantic schemas.""" + + def test_batch_request_schema(self): + """Test BatchSpatialQueryRequest accepts valid input.""" + from ..schemas.query import BatchSpatialQueryRequest + + request = BatchSpatialQueryRequest( + geometries=[ + { + "id": "zone_1", + "geometry": { + "type": "Polygon", + "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]], + }, + } + ], + filters={"is_group": True}, + variables=["children_under_5"], + ) + + self.assertEqual(len(request.geometries), 1) + self.assertEqual(request.geometries[0].id, "zone_1") + self.assertEqual(request.filters, {"is_group": True}) + self.assertEqual(request.variables, ["children_under_5"]) + + def test_batch_request_requires_geometries(self): + """Test that BatchSpatialQueryRequest requires at least one geometry.""" + from pydantic import 
ValidationError + + from ..schemas.query import BatchSpatialQueryRequest + + with self.assertRaises(ValidationError): + BatchSpatialQueryRequest(geometries=[]) + + def test_batch_response_schema(self): + """Test BatchSpatialQueryResponse structure with metadata.""" + from ..schemas.query import BatchSpatialQueryResponse + + response = BatchSpatialQueryResponse( + results=[ + { + "id": "zone_1", + "total_count": 100, + "query_method": "coordinates", + "areas_matched": 2, + "statistics": {"total_households": 50}, + "access_level": "aggregate", + "from_cache": False, + "computed_at": "2024-01-01T00:00:00Z", + } + ], + summary={ + "total_count": 100, + "geometries_queried": 1, + "statistics": {"total_households": 50}, + "access_level": "aggregate", + "from_cache": False, + "computed_at": "2024-01-01T00:00:00Z", + }, + ) + + self.assertEqual(len(response.results), 1) + self.assertEqual(response.results[0].id, "zone_1") + self.assertEqual(response.results[0].access_level, "aggregate") + self.assertFalse(response.results[0].from_cache) + self.assertEqual(response.results[0].computed_at, "2024-01-01T00:00:00Z") + self.assertEqual(response.summary.total_count, 100) + self.assertEqual(response.summary.access_level, "aggregate") + + def test_geometry_item_schema(self): + """Test GeometryItem schema.""" + from ..schemas.query import GeometryItem + + item = GeometryItem( + id="flood_zone_1", + geometry={ + "type": "MultiPolygon", + "coordinates": [[[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]]], + }, + ) + + self.assertEqual(item.id, "flood_zone_1") + self.assertEqual(item.geometry["type"], "MultiPolygon") + + def test_batch_response_backward_compatibility(self): + """Test that schemas work without metadata fields (backward compatibility).""" + from ..schemas.query import BatchSpatialQueryResponse + + # Old-style response without metadata fields (should use defaults) + response = BatchSpatialQueryResponse( + results=[ + { + "id": "zone_1", + "total_count": 100, + "query_method": 
"coordinates", + "areas_matched": 2, + "statistics": {"total_households": 50}, + } + ], + summary={ + "total_count": 100, + "geometries_queried": 1, + "statistics": {"total_households": 50}, + }, + ) + + # Metadata fields should have defaults + self.assertIsNone(response.results[0].access_level) + self.assertFalse(response.results[0].from_cache) + self.assertIsNone(response.results[0].computed_at) + self.assertIsNone(response.summary.access_level) + self.assertFalse(response.summary.from_cache) + self.assertIsNone(response.summary.computed_at) diff --git a/spp_api_v2_gis/tests/test_catalog_service.py b/spp_api_v2_gis/tests/test_catalog_service.py new file mode 100644 index 00000000..b7e50013 --- /dev/null +++ b/spp_api_v2_gis/tests/test_catalog_service.py @@ -0,0 +1,436 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Tests for catalog service.""" + +import logging +from datetime import datetime, timedelta + +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install") +class TestCatalogService(TransactionCase): + """Test catalog service functionality.""" + + @classmethod + def setUpClass(cls): + """Set up test data.""" + super().setUpClass() + + # Create color scheme for reports + cls.color_scheme = cls.env["spp.gis.color.scheme"].create( + { + "name": "Test Color Scheme", + "code": "test_scheme", + "scheme_type": "sequential", + "colors": '["#440154", "#21918c", "#fde725"]', + "default_steps": 3, + } + ) + + # Create report category + cls.category = cls.env["spp.gis.report.category"].create( + { + "name": "Test Category", + "code": "test_category", + "sequence": 10, + } + ) + + # Create area model reference + cls.area_model = cls.env["ir.model"].search([("model", "=", "spp.area")], limit=1) + + # Create test reports + cls.report1 = cls.env["spp.gis.report"].create( + { + "name": "Test Report 1", + "code": "test_report_1", + 
"description": "Test report description", + "category_id": cls.category.id, + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "polygon", + "color_scheme_id": cls.color_scheme.id, + "last_refresh": datetime.now() - timedelta(days=1), + "sequence": 10, + } + ) + + cls.report2 = cls.env["spp.gis.report"].create( + { + "name": "Test Report 2", + "code": "test_report_2", + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 3, + "normalization_method": "raw", + "geometry_type": "point", + "sequence": 20, + } + ) + + # Create inactive report (should not appear in catalog) + cls.inactive_report = cls.env["spp.gis.report"].create( + { + "name": "Inactive Report", + "code": "inactive_report", + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "polygon", + "active": False, + "sequence": 30, + } + ) + + # Create data layer + cls.geo_field = cls.env["ir.model.fields"].search( + [("model", "=", "spp.area"), ("name", "=", "polygon")], + limit=1, + ) + if cls.geo_field: + cls.data_layer = cls.env["spp.gis.data.layer"].create( + { + "name": "Test Data Layer", + "model_name": "spp.area", + "geo_field_id": cls.geo_field.id, + "geo_repr": "basic", + "sequence": 10, + } + ) + else: + cls.data_layer = None + + def test_get_catalog_with_reports(self): + """Test getting catalog with reports.""" + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + # Verify catalog structure + self.assertIn("reports", catalog) + self.assertIn("data_layers", catalog) + + # Verify reports are present + reports = catalog["reports"] + self.assertGreaterEqual(len(reports), 2, "Should have at least 2 active 
reports") + + # Verify report 1 data + report1_data = next((r for r in reports if r["id"] == "test_report_1"), None) + self.assertIsNotNone(report1_data) + self.assertEqual(report1_data["name"], "Test Report 1") + self.assertEqual(report1_data["description"], "Test report description") + self.assertEqual(report1_data["category"], "Test Category") + self.assertEqual(report1_data["geometry_type"], "polygon") + self.assertEqual(report1_data["area_level"], 2) + self.assertIsNotNone(report1_data["last_refresh"]) + + # Verify report 2 data + report2_data = next((r for r in reports if r["id"] == "test_report_2"), None) + self.assertIsNotNone(report2_data) + self.assertEqual(report2_data["name"], "Test Report 2") + self.assertIsNone(report2_data["category"]) + self.assertEqual(report2_data["geometry_type"], "point") + self.assertEqual(report2_data["area_level"], 3) + + # Verify inactive report is not present + inactive_data = next((r for r in reports if r["id"] == "inactive_report"), None) + self.assertIsNone(inactive_data, "Inactive reports should not appear in catalog") + + def test_get_catalog_with_data_layers(self): + """Test getting catalog with data layers.""" + if not self.data_layer: + self.skipTest("No data layer available (spp.area polygon field not found)") + + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + # Verify data layers are present + data_layers = catalog["data_layers"] + self.assertGreater(len(data_layers), 0, "Should have at least 1 data layer") + + # Verify data layer structure + layer_data = next((layer for layer in data_layers if layer["name"] == "Test Data Layer"), None) + self.assertIsNotNone(layer_data) + self.assertEqual(layer_data["name"], "Test Data Layer") + self.assertEqual(layer_data["geometry_type"], "polygon") + self.assertEqual(layer_data["source_model"], "spp.area") + # Verify source_type and report_code fields + self.assertIn("source_type", layer_data) 
+ self.assertEqual(layer_data["source_type"], "model") + self.assertIn("report_code", layer_data) + self.assertIsNone(layer_data["report_code"]) + + def test_get_catalog_report_driven_data_layer(self): + """Test catalog includes source_type and report_code for report-driven layers.""" + if not self.geo_field: + self.skipTest("No geo field available for data layer creation") + # Create a report-driven data layer + report_layer = self.env["spp.gis.data.layer"].create( + { + "name": "Report-Driven Layer", + "source_type": "report", + "report_id": self.report1.id, + "geo_field_id": self.geo_field.id, + "geo_repr": "choropleth", + "sequence": 99, + } + ) + + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + data_layers = catalog["data_layers"] + layer_data = next( + (layer for layer in data_layers if layer["id"] == str(report_layer.id)), + None, + ) + self.assertIsNotNone(layer_data, "Report-driven data layer should appear in catalog") + self.assertEqual(layer_data["source_type"], "report") + self.assertEqual(layer_data["report_code"], "test_report_1") + + def test_catalog_reports_ordered_by_sequence(self): + """Test that reports are ordered by sequence then name.""" + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + reports = catalog["reports"] + report_codes = [r["id"] for r in reports if r["id"].startswith("test_report_")] + + # Should be ordered by sequence + self.assertGreater( + report_codes.index("test_report_1"), + -1, + ) + self.assertGreater( + report_codes.index("test_report_2"), + -1, + ) + + def test_empty_catalog(self): + """Test catalog with no reports or layers.""" + # Deactivate all test reports + self.report1.active = False + self.report2.active = False + + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + # 
Should still have structure but may be empty or have other reports + self.assertIn("reports", catalog) + self.assertIn("data_layers", catalog) + self.assertIsInstance(catalog["reports"], list) + self.assertIsInstance(catalog["data_layers"], list) + + def test_freshness_indicator_fresh(self): + """Test freshness indicator for recently refreshed report.""" + # Set last_refresh to recent time + self.report1.write( + { + "last_refresh": datetime.now() - timedelta(hours=1), + "is_stale": False, + } + ) + + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + report1_data = next((r for r in catalog["reports"] if r["id"] == "test_report_1"), None) + self.assertIsNotNone(report1_data) + self.assertEqual(report1_data["freshness"], "fresh") + + def test_freshness_indicator_stale(self): + """Test freshness indicator for stale report.""" + # Mark report as stale + self.report1.write( + { + "last_refresh": datetime.now() - timedelta(days=10), + "is_stale": True, + } + ) + + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + report1_data = next((r for r in catalog["reports"] if r["id"] == "test_report_1"), None) + self.assertIsNotNone(report1_data) + self.assertEqual(report1_data["freshness"], "stale") + + def test_freshness_indicator_never_refreshed(self): + """Test freshness indicator for never refreshed report.""" + # Clear last_refresh + self.report2.write({"last_refresh": False}) + + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + report2_data = next((r for r in catalog["reports"] if r["id"] == "test_report_2"), None) + self.assertIsNotNone(report2_data) + self.assertEqual(report2_data["freshness"], "never_refreshed") + + def test_normalize_geometry_type_polygon(self): + """Test geometry type normalization for polygon.""" + from 
..services.catalog_service import CatalogService + + service = CatalogService(self.env) + self.assertEqual(service._normalize_geometry_type("polygon"), "polygon") + + def test_normalize_geometry_type_point(self): + """Test geometry type normalization for point.""" + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + self.assertEqual(service._normalize_geometry_type("point"), "point") + + def test_normalize_geometry_type_cluster(self): + """Test geometry type normalization for cluster.""" + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + # Cluster is still point type + self.assertEqual(service._normalize_geometry_type("cluster"), "point") + + def test_normalize_geometry_type_heatmap(self): + """Test geometry type normalization for heatmap.""" + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + # Heatmap is based on points + self.assertEqual(service._normalize_geometry_type("heatmap"), "point") + + def test_normalize_geometry_type_unknown(self): + """Test geometry type normalization for unknown type.""" + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + # Should default to polygon + self.assertEqual(service._normalize_geometry_type("unknown"), "polygon") + + def test_data_layer_geo_repr_mapping(self): + """Test data layer geo_repr to geometry type mapping.""" + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + # Basic and choropleth both map to polygon + self.assertEqual(service._map_geo_repr_to_geometry_type("basic"), "polygon") + self.assertEqual(service._map_geo_repr_to_geometry_type("choropleth"), "polygon") + + def test_report_has_admin_levels_available(self): + """Test that reports include admin_levels_available from report data.""" + # Create area type for level 1 + area_type_1 = self.env["spp.area.type"].create({"name": "Region"}) + 
area_type_2 = self.env["spp.area.type"].create({"name": "District"}) + + # Create areas at levels 1 and 2 + area_l1 = self.env["spp.area"].create( + { + "draft_name": "Catalog Test Region", + "code": "catalog_test_region", + "area_type_id": area_type_1.id, + } + ) + area_l2 = self.env["spp.area"].create( + { + "draft_name": "Catalog Test District", + "code": "catalog_test_district", + "parent_id": area_l1.id, + "area_type_id": area_type_2.id, + } + ) + + # Create report data at both levels + self.env["spp.gis.report.data"].create( + { + "report_id": self.report1.id, + "area_id": area_l1.id, + "area_code": area_l1.code, + "area_name": area_l1.draft_name, + "area_level": area_l1.area_level, + "raw_value": 100.0, + "normalized_value": 0.5, + "display_value": "100", + "record_count": 100, + } + ) + self.env["spp.gis.report.data"].create( + { + "report_id": self.report1.id, + "area_id": area_l2.id, + "area_code": area_l2.code, + "area_name": area_l2.draft_name, + "area_level": area_l2.area_level, + "raw_value": 50.0, + "normalized_value": 0.25, + "display_value": "50", + "record_count": 50, + } + ) + + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + report1_data = next((r for r in catalog["reports"] if r["id"] == "test_report_1"), None) + self.assertIsNotNone(report1_data) + self.assertIn("admin_levels_available", report1_data) + levels = report1_data["admin_levels_available"] + self.assertIsInstance(levels, list) + self.assertEqual(levels, sorted(levels)) + self.assertIn(area_l1.area_level, levels) + self.assertIn(area_l2.area_level, levels) + + def test_report_no_data_has_empty_admin_levels(self): + """Test that reports with no data have empty admin_levels_available.""" + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + report2_data = next((r for r in catalog["reports"] if r["id"] == "test_report_2"), 
None) + self.assertIsNotNone(report2_data) + self.assertIn("admin_levels_available", report2_data) + self.assertEqual(report2_data["admin_levels_available"], []) + + def test_catalog_has_area_level_names(self): + """Test that catalog includes area_level_names mapping.""" + # Create area type and area at known level + area_type = self.env["spp.area.type"].create({"name": "Province"}) + self.env["spp.area"].create( + { + "draft_name": "Catalog Level Name Test", + "code": "catalog_level_name_test", + "area_type_id": area_type.id, + } + ) + + from ..services.catalog_service import CatalogService + + service = CatalogService(self.env) + catalog = service.get_catalog() + + self.assertIn("area_level_names", catalog) + self.assertIsInstance(catalog["area_level_names"], dict) diff --git a/spp_api_v2_gis/tests/test_export_service.py b/spp_api_v2_gis/tests/test_export_service.py new file mode 100644 index 00000000..98951ed8 --- /dev/null +++ b/spp_api_v2_gis/tests/test_export_service.py @@ -0,0 +1,471 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
+"""Tests for export service.""" + +import json +import logging +import zipfile +from io import BytesIO + +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install") +class TestExportService(TransactionCase): + """Test export service functionality.""" + + @classmethod + def setUpClass(cls): + """Set up test data.""" + super().setUpClass() + + # Sample polygon GeoJSON + cls.sample_polygon = { + "type": "Polygon", + "coordinates": [ + [ + [100.0, 0.0], + [101.0, 0.0], + [101.0, 1.0], + [100.0, 1.0], + [100.0, 0.0], + ] + ], + } + + # Create color scheme + cls.color_scheme = cls.env["spp.gis.color.scheme"].create( + { + "name": "Test Export Colors", + "code": "test_export", + "scheme_type": "sequential", + "colors": '["#440154", "#21918c"]', + "default_steps": 2, + } + ) + + # Create area model reference + cls.area_model = cls.env["ir.model"].search([("model", "=", "spp.area")], limit=1) + + # Create test reports + cls.report1 = cls.env["spp.gis.report"].create( + { + "name": "Export Test Report 1", + "code": "export_test_report_1", + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "polygon", + "color_scheme_id": cls.color_scheme.id, + } + ) + + cls.report2 = cls.env["spp.gis.report"].create( + { + "name": "Export Test Report 2", + "code": "export_test_report_2", + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "point", + } + ) + + # Create test geofences + cls.geofence1 = cls.env["spp.gis.geofence"].create( + { + "name": "Export Geofence 1", + "geometry": json.dumps(cls.sample_polygon), + "geofence_type": "custom", + } + ) + + cls.geofence2 = cls.env["spp.gis.geofence"].create( + { + "name": "Export Geofence 
class TestExportService(TransactionCase):
    """Exercise ExportService (GeoPackage export with GeoJSON/ZIP fallback).

    NOTE(review): the class fixtures (``report1``/``report2`` GIS reports,
    ``geofence1``/``geofence2`` geofences and ``sample_polygon``) are created
    in ``setUpClass`` earlier in this file — confirm when merging.
    """

    def _get_service(self):
        """Return a fresh ExportService bound to the test environment.

        The import is kept local (as in the original tests) so the module
        can be collected even when the service's optional dependencies are
        missing.
        """
        from ..services.export_service import ExportService

        return ExportService(self.env)

    def test_export_geopackage_fallback_to_zip(self):
        """Export falls back to a ZIP of GeoJSON files when fiona is unavailable."""
        content, filename, content_type = self._get_service().export_geopackage(
            layer_ids=["export_test_report_1"],
            include_geofences=True,
        )

        self.assertTrue(content)
        self.assertTrue(filename)
        # Either a real GeoPackage (fiona present) or the ZIP fallback is valid.
        self.assertIn(content_type, ["application/geopackage+sqlite3", "application/zip"])

    def test_export_specific_layers(self):
        """Export a named subset of layers."""
        content, _filename, _content_type = self._get_service().export_geopackage(
            layer_ids=["export_test_report_1", "export_test_report_2"],
            include_geofences=False,
        )

        self.assertTrue(content)
        self.assertIsInstance(content, bytes)

    def test_export_all_layers(self):
        """layer_ids=None exports every available layer."""
        content, _filename, _content_type = self._get_service().export_geopackage(
            layer_ids=None,  # Export all
            include_geofences=False,
        )

        self.assertTrue(content)
        self.assertIsInstance(content, bytes)

    def test_export_with_geofences(self):
        """Geofences can be bundled into the export."""
        content, _filename, _content_type = self._get_service().export_geopackage(
            layer_ids=["export_test_report_1"],
            include_geofences=True,
        )

        self.assertTrue(content)
        self.assertIsInstance(content, bytes)

    def test_export_without_geofences(self):
        """Geofences can be excluded from the export."""
        content, _filename, _content_type = self._get_service().export_geopackage(
            layer_ids=["export_test_report_1"],
            include_geofences=False,
        )

        self.assertTrue(content)
        self.assertIsInstance(content, bytes)

    def test_export_no_data_raises_error(self):
        """Exporting when nothing is active raises ValueError."""
        service = self._get_service()

        # Deactivate every report and geofence so no data remains.
        (self.report1 | self.report2).write({"active": False})
        (self.geofence1 | self.geofence2).write({"active": False})

        with self.assertRaises(ValueError) as context:
            service.export_geopackage(layer_ids=[], include_geofences=True)

        self.assertIn("No data available", str(context.exception))

    def test_collect_layers_specific_codes(self):
        """_collect_layers returns (name, geojson) tuples for known codes."""
        layers_data = self._get_service()._collect_layers(
            layer_ids=["export_test_report_1"],
            admin_level=None,
        )

        self.assertGreater(len(layers_data), 0)
        # Verify structure: list of (name, geojson) tuples.
        self.assertIsInstance(layers_data[0], tuple)
        self.assertEqual(len(layers_data[0]), 2)

    def test_collect_layers_all(self):
        """_collect_layers with no codes collects every active report."""
        layers_data = self._get_service()._collect_layers(
            layer_ids=None,
            admin_level=None,
        )

        self.assertGreater(len(layers_data), 0)

    def test_collect_layers_with_admin_level_filter(self):
        """_collect_layers accepts an admin-level filter without error."""
        layers_data = self._get_service()._collect_layers(
            layer_ids=["export_test_report_1"],
            admin_level=2,
        )

        self.assertIsInstance(layers_data, list)

    def test_collect_layers_invalid_code(self):
        """Unknown layer codes yield an empty result (a warning is logged)."""
        layers_data = self._get_service()._collect_layers(
            layer_ids=["nonexistent_report"],
            admin_level=None,
        )

        self.assertEqual(len(layers_data), 0)

    def test_collect_geofences(self):
        """_collect_geofences returns a single 'geofences' FeatureCollection."""
        geofences_data = self._get_service()._collect_geofences()

        self.assertIsInstance(geofences_data, list)
        if geofences_data:
            # All geofences are folded into one "geofences" layer.
            self.assertEqual(len(geofences_data), 1)
            name, geojson = geofences_data[0]
            self.assertEqual(name, "geofences")
            self.assertEqual(geojson["type"], "FeatureCollection")
            self.assertGreater(len(geojson["features"]), 0)

    def test_collect_geofences_empty(self):
        """_collect_geofences returns [] when no geofence is active."""
        (self.geofence1 | self.geofence2).write({"active": False})

        self.assertEqual(len(self._get_service()._collect_geofences()), 0)

    def test_create_geojson_zip(self):
        """_create_geojson_zip bundles layers and geofences as *.geojson files."""
        service = self._get_service()

        layers_data = service._collect_layers(
            layer_ids=["export_test_report_1"],
            admin_level=None,
        )
        geofences_data = service._collect_geofences()

        content, filename, content_type = service._create_geojson_zip(
            layers_data,
            geofences_data,
        )

        self.assertEqual(filename, "openspp_export.zip")
        self.assertEqual(content_type, "application/zip")
        self.assertIsInstance(content, bytes)

        # Every archive member must be a valid GeoJSON FeatureCollection.
        with zipfile.ZipFile(BytesIO(content), "r") as zf:
            file_list = zf.namelist()
            self.assertGreater(len(file_list), 0)
            for file_name in file_list:
                self.assertTrue(file_name.endswith(".geojson"))
                with zf.open(file_name) as f:
                    geojson_data = json.load(f)
                self.assertEqual(geojson_data["type"], "FeatureCollection")

    def test_sanitize_filename_basic(self):
        """_sanitize_filename normalizes spaces and path separators."""
        service = self._get_service()

        self.assertEqual(service._sanitize_filename("simple_name"), "simple_name")
        self.assertEqual(service._sanitize_filename("name with spaces"), "name_with_spaces")
        self.assertEqual(service._sanitize_filename("name/with/slashes"), "name_with_slashes")
        self.assertEqual(service._sanitize_filename("name\\with\\backslashes"), "name_with_backslashes")

    def test_sanitize_filename_special_chars(self):
        """_sanitize_filename keeps alphanumerics, underscore and dash."""
        service = self._get_service()

        self.assertEqual(service._sanitize_filename("name-123_test"), "name-123_test")
        # Other special characters are removed outright.
        self.assertEqual(service._sanitize_filename("name@#$%test"), "nametest")

    def test_sanitize_filename_empty(self):
        """_sanitize_filename falls back to 'layer' for an empty string."""
        self.assertEqual(self._get_service()._sanitize_filename(""), "layer")

    def test_get_geometry_type_polygon(self):
        """_get_geometry_type reads the type of a Polygon feature."""
        feature = {
            "type": "Feature",
            "geometry": {"type": "Polygon", "coordinates": []},
            "properties": {},
        }

        self.assertEqual(self._get_service()._get_geometry_type(feature), "Polygon")

    def test_get_geometry_type_point(self):
        """_get_geometry_type reads the type of a Point feature."""
        feature = {
            "type": "Feature",
            "geometry": {"type": "Point", "coordinates": []},
            "properties": {},
        }

        self.assertEqual(self._get_service()._get_geometry_type(feature), "Point")

    def test_get_geometry_type_multipolygon(self):
        """_get_geometry_type reads the type of a MultiPolygon feature."""
        feature = {
            "type": "Feature",
            "geometry": {"type": "MultiPolygon", "coordinates": []},
            "properties": {},
        }

        self.assertEqual(self._get_service()._get_geometry_type(feature), "MultiPolygon")

    def test_get_geometry_type_missing(self):
        """_get_geometry_type defaults to 'Point' when geometry is absent."""
        feature = {
            "type": "Feature",
            "geometry": None,
            "properties": {},
        }

        self.assertEqual(self._get_service()._get_geometry_type(feature), "Point")

    def test_build_schema_from_feature(self):
        """_build_schema maps Python property types to fiona field types."""
        feature = {
            "type": "Feature",
            "geometry": {"type": "Polygon"},
            "properties": {
                "name": "Test",
                "count": 42,
                "value": 3.14,
                "active": True,
            },
        }

        schema = self._get_service()._build_schema(feature, "Polygon")

        self.assertEqual(schema["geometry"], "Polygon")
        self.assertIn("properties", schema)

        props = schema["properties"]
        self.assertEqual(props["name"], "str")
        self.assertEqual(props["count"], "int")
        self.assertEqual(props["value"], "float")
        self.assertEqual(props["active"], "bool")

    def test_build_schema_empty_properties(self):
        """_build_schema handles a feature with no properties."""
        feature = {
            "type": "Feature",
            "geometry": {"type": "Point"},
            "properties": {},
        }

        schema = self._get_service()._build_schema(feature, "Point")

        self.assertEqual(schema["geometry"], "Point")
        self.assertEqual(len(schema["properties"]), 0)

    def test_export_returns_valid_tuple(self):
        """export_geopackage returns a (bytes, str, str) triple."""
        result = self._get_service().export_geopackage(
            layer_ids=["export_test_report_1"],
            include_geofences=False,
        )

        self.assertIsInstance(result, tuple)
        self.assertEqual(len(result), 3)

        content, filename, content_type = result
        self.assertIsInstance(content, bytes)
        self.assertIsInstance(filename, str)
        self.assertIsInstance(content_type, str)
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Tests for geofence model."""

import json
import logging

import psycopg2

from odoo.exceptions import ValidationError
from odoo.tests import tagged
from odoo.tests.common import TransactionCase
from odoo.tools import mute_logger

_logger = logging.getLogger(__name__)


@tagged("post_install", "-at_install")
class TestGeofenceModel(TransactionCase):
    """Test geofence model functionality (CRUD, constraints, GeoJSON I/O)."""

    @classmethod
    def setUpClass(cls):
        """Set up shared geometries and a test vocabulary tag."""
        super().setUpClass()

        # Sample polygon GeoJSON (unit square near the equator).
        cls.sample_polygon = {
            "type": "Polygon",
            "coordinates": [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]],
        }

        # Sample multipolygon GeoJSON (two disjoint squares).
        cls.sample_multipolygon = {
            "type": "MultiPolygon",
            "coordinates": [
                [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
                [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]],
            ],
        }

        # Create test vocabulary for tags.
        # Note: tag_ids on spp.gis.geofence is Many2many to spp.vocabulary,
        # not spp.vocabulary.code.
        cls.tag1 = cls.env["spp.vocabulary"].search(
            [("namespace_uri", "=", "urn:openspp:concept:geofence_tag_1")],
            limit=1,
        )
        if not cls.tag1:
            cls.tag1 = cls.env["spp.vocabulary"].create(
                {
                    "name": "Test Tag 1",
                    "namespace_uri": "urn:openspp:concept:geofence_tag_1",
                }
            )

    def _create_geofence(self, name, geometry=None, **extra):
        """Create a geofence with test defaults.

        ``geometry`` defaults to :attr:`sample_polygon`; any additional
        field values may be passed as keyword arguments (including an
        overriding ``geofence_type``).
        """
        vals = {
            "name": name,
            "geometry": json.dumps(geometry if geometry is not None else self.sample_polygon),
            "geofence_type": "custom",
        }
        vals.update(extra)
        return self.env["spp.gis.geofence"].create(vals)

    def test_create_geofence_basic(self):
        """Create a basic geofence and check its defaults."""
        geofence = self._create_geofence("Test Geofence", description="Test description")

        self.assertTrue(geofence)
        self.assertEqual(geofence.name, "Test Geofence")
        self.assertEqual(geofence.geofence_type, "custom")
        self.assertTrue(geofence.active)
        self.assertEqual(geofence.created_from, "ui")

    def test_create_geofence_with_type(self):
        """Every selectable geofence type can be stored."""
        for geofence_type in ("hazard_zone", "service_area", "targeting_area", "custom"):
            geofence = self._create_geofence(f"Test {geofence_type}", geofence_type=geofence_type)
            self.assertEqual(geofence.geofence_type, geofence_type)

    def test_create_geofence_with_tags(self):
        """Vocabulary tags can be attached at creation time."""
        geofence = self._create_geofence("Tagged Geofence", tag_ids=[(6, 0, [self.tag1.id])])

        self.assertEqual(len(geofence.tag_ids), 1)
        self.assertIn(self.tag1, geofence.tag_ids)

    def test_create_geofence_from_qgis(self):
        """created_from records the originating client (QGIS plugin)."""
        geofence = self._create_geofence("QGIS Geofence", created_from="qgis")

        self.assertEqual(geofence.created_from, "qgis")

    def test_geofence_unique_name_constraint(self):
        """Active geofences must have unique names."""
        self._create_geofence("Unique Name")

        with self.assertRaises(ValidationError) as context:
            self._create_geofence("Unique Name")

        self.assertIn("already exists", str(context.exception))

    def test_geofence_inactive_allows_duplicate_names(self):
        """An inactive geofence does not block re-use of its name."""
        geofence1 = self._create_geofence("Duplicate OK")
        geofence1.active = False

        geofence2 = self._create_geofence("Duplicate OK")

        self.assertTrue(geofence2)
        self.assertEqual(geofence2.name, "Duplicate OK")

    @mute_logger("odoo.sql_db")
    def test_geofence_geometry_required(self):
        """Geometry is required (NOT NULL enforced at the database level).

        The violation is raised by PostgreSQL, so run the insert inside a
        savepoint to keep the test transaction usable afterwards, and mute
        the expected SQL error log.
        """
        with self.assertRaises(psycopg2.IntegrityError), self.env.cr.savepoint():
            self.env["spp.gis.geofence"].create(
                {
                    "name": "No Geometry",
                    "geofence_type": "custom",
                }
            )
            # Force the INSERT so the DB constraint actually fires here.
            self.env.flush_all()

    def test_to_geojson_feature(self):
        """to_geojson produces a well-formed GeoJSON Feature."""
        geofence = self._create_geofence(
            "GeoJSON Test",
            description="Test description",
            geofence_type="hazard_zone",
            tag_ids=[(6, 0, [self.tag1.id])],
        )

        feature = geofence.to_geojson()

        # Feature envelope.
        self.assertEqual(feature["type"], "Feature")
        self.assertIn("geometry", feature)
        self.assertIn("properties", feature)

        # Geometry round-trips as a Polygon.
        geometry = feature["geometry"]
        self.assertEqual(geometry["type"], "Polygon")
        self.assertIn("coordinates", geometry)

        # Key properties are exposed.
        props = feature["properties"]
        self.assertEqual(props["name"], "GeoJSON Test")
        self.assertEqual(props["description"], "Test description")
        self.assertEqual(props["geofence_type"], "hazard_zone")
        self.assertIn("Test Tag 1", props["tags"])

    def test_to_geojson_properties_structure(self):
        """GeoJSON properties contain every expected field."""
        geofence = self._create_geofence("Props Test", geofence_type="service_area")

        props = geofence.to_geojson()["properties"]

        for key in (
            "id",
            "name",
            "description",
            "geofence_type",
            "geofence_type_label",
            "area_sqkm",
            "tags",
            "created_from",
            "created_by",
            "create_date",
        ):
            self.assertIn(key, props)

    def test_to_geojson_collection(self):
        """to_geojson_collection wraps a recordset as a FeatureCollection."""
        geofence1 = self._create_geofence("Geofence 1")
        geofence2 = self._create_geofence(
            "Geofence 2",
            geometry=self.sample_multipolygon,
            geofence_type="hazard_zone",
        )

        collection = (geofence1 + geofence2).to_geojson_collection()

        self.assertEqual(collection["type"], "FeatureCollection")
        self.assertIn("features", collection)
        self.assertEqual(len(collection["features"]), 2)
        self.assertEqual(collection["features"][0]["type"], "Feature")
        self.assertEqual(collection["features"][1]["type"], "Feature")

    def test_create_from_geojson_feature(self):
        """create_from_geojson accepts a single GeoJSON Feature."""
        feature = {
            "type": "Feature",
            "geometry": self.sample_polygon,
            "properties": {"name": "Feature Test"},
        }

        geofence = self.env["spp.gis.geofence"].create_from_geojson(
            geojson_str=json.dumps(feature),
            name="Created From Feature",
            geofence_type="custom",
            created_from="api",
        )

        self.assertTrue(geofence)
        self.assertEqual(geofence.name, "Created From Feature")
        self.assertEqual(geofence.geofence_type, "custom")
        self.assertEqual(geofence.created_from, "api")

    def test_create_from_geojson_feature_collection(self):
        """create_from_geojson accepts a FeatureCollection (first feature)."""
        feature_collection = {
            "type": "FeatureCollection",
            "features": [
                {
                    "type": "Feature",
                    "geometry": self.sample_polygon,
                    "properties": {},
                }
            ],
        }

        geofence = self.env["spp.gis.geofence"].create_from_geojson(
            geojson_str=json.dumps(feature_collection),
            name="Created From Collection",
            geofence_type="service_area",
        )

        self.assertTrue(geofence)
        self.assertEqual(geofence.name, "Created From Collection")

    def test_create_from_geojson_raw_geometry(self):
        """create_from_geojson accepts a bare GeoJSON geometry."""
        geofence = self.env["spp.gis.geofence"].create_from_geojson(
            geojson_str=json.dumps(self.sample_polygon),
            name="Created From Geometry",
            geofence_type="custom",
        )

        self.assertTrue(geofence)
        self.assertEqual(geofence.name, "Created From Geometry")

    def test_create_from_geojson_invalid_json(self):
        """Malformed JSON raises a ValidationError."""
        with self.assertRaises(ValidationError) as context:
            self.env["spp.gis.geofence"].create_from_geojson(
                geojson_str="invalid json",
                name="Invalid",
                geofence_type="custom",
            )

        self.assertIn("Invalid GeoJSON", str(context.exception))

    def test_create_from_geojson_empty_feature_collection(self):
        """An empty FeatureCollection raises a ValidationError."""
        feature_collection = {"type": "FeatureCollection", "features": []}

        with self.assertRaises(ValidationError) as context:
            self.env["spp.gis.geofence"].create_from_geojson(
                geojson_str=json.dumps(feature_collection),
                name="Empty",
                geofence_type="custom",
            )

        self.assertIn("must contain at least one feature", str(context.exception))

    def test_create_from_geojson_no_geometry(self):
        """A Feature with null geometry raises a ValidationError."""
        feature = {"type": "Feature", "geometry": None, "properties": {}}

        with self.assertRaises(ValidationError) as context:
            self.env["spp.gis.geofence"].create_from_geojson(
                geojson_str=json.dumps(feature),
                name="No Geometry",
                geofence_type="custom",
            )

        self.assertIn("No geometry found", str(context.exception))

    def test_create_from_geojson_with_additional_fields(self):
        """Extra keyword values are written onto the new record."""
        geofence = self.env["spp.gis.geofence"].create_from_geojson(
            geojson_str=json.dumps(self.sample_polygon),
            name="With Fields",
            geofence_type="hazard_zone",
            created_from="qgis",
            description="Test description",
        )

        self.assertEqual(geofence.description, "Test description")
        self.assertEqual(geofence.created_from, "qgis")

    def test_geofence_multipolygon(self):
        """MultiPolygon geometries are stored and round-trip via to_geojson."""
        geofence = self._create_geofence("MultiPolygon Test", geometry=self.sample_multipolygon)

        self.assertTrue(geofence)
        self.assertEqual(geofence.to_geojson()["geometry"]["type"], "MultiPolygon")

    def test_geofence_area_computation(self):
        """Area is computed (may be 0 when PostGIS is unavailable)."""
        geofence = self._create_geofence("Area Test")

        self.assertIsNotNone(geofence.area_sqkm)
        self.assertGreaterEqual(geofence.area_sqkm, 0)

    def test_geofence_created_by_default(self):
        """created_by defaults to the current user."""
        geofence = self._create_geofence("Created By Test")

        self.assertEqual(geofence.created_by_id, self.env.user)

    def test_geofence_type_label_in_properties(self):
        """geofence_type_label carries the human-readable selection label."""
        geofence = self._create_geofence("Type Label Test", geofence_type="service_area")

        props = geofence.to_geojson()["properties"]

        self.assertEqual(props["geofence_type"], "service_area")
        self.assertEqual(props["geofence_type_label"], "Service Area")
class TestLayersService(TransactionCase):
    """Exercise LayersService (GeoJSON for report layers and data layers).

    NOTE(review): fixtures (``color_scheme``, the ``report`` with two
    thresholds, ``parent_area``/``child_area1``/``child_area2``,
    ``area_model`` and the optional ``data_layer``) are created in
    ``setUpClass`` earlier in this file — confirm when merging.
    """

    def _get_service(self):
        """Return a fresh LayersService bound to the test environment.

        The import stays local (as in the original tests) so module import
        does not depend on the service's transitive dependencies.
        """
        from ..services.layers_service import LayersService

        return LayersService(self.env)

    def test_get_report_layer_as_geojson(self):
        """A report layer renders as a FeatureCollection with styling hints."""
        geojson = self._get_service().get_layer_geojson(
            layer_id="test_layers_report",
            layer_type="report",
        )

        self.assertEqual(geojson["type"], "FeatureCollection")
        self.assertIn("features", geojson)
        self.assertIn("metadata", geojson)
        self.assertIn("styling", geojson)

        self.assertIn("styling", geojson["metadata"])

        styling = geojson["styling"]
        self.assertEqual(styling["geometry_type"], "polygon")
        self.assertEqual(styling["threshold_mode"], "manual")
        self.assertIn("color_scheme", styling)
        self.assertIn("thresholds", styling)
        self.assertGreaterEqual(len(styling["thresholds"]), 2)

    def test_get_report_layer_invalid_code(self):
        """An unknown report code raises MissingError."""
        with self.assertRaises(MissingError) as context:
            self._get_service().get_layer_geojson(
                layer_id="nonexistent_report",
                layer_type="report",
            )

        self.assertIn("not found", str(context.exception))

    def test_get_report_layer_filter_by_admin_level(self):
        """Filtering a report by admin level completes successfully."""
        geojson = self._get_service().get_layer_geojson(
            layer_id="test_layers_report",
            layer_type="report",
            admin_level=2,
        )

        self.assertEqual(geojson["type"], "FeatureCollection")

    def test_get_report_layer_filter_by_area_codes(self):
        """Filtering a report by explicit area codes completes successfully."""
        geojson = self._get_service().get_layer_geojson(
            layer_id="test_layers_report",
            layer_type="report",
            area_codes=["test_child_1", "test_child_2"],
        )

        self.assertEqual(geojson["type"], "FeatureCollection")

    def test_get_report_layer_filter_by_parent_area(self):
        """Filtering a report by parent area code completes successfully."""
        geojson = self._get_service().get_layer_geojson(
            layer_id="test_layers_report",
            layer_type="report",
            parent_area_code="test_parent",
        )

        self.assertEqual(geojson["type"], "FeatureCollection")

    def test_get_report_layer_without_geometry(self):
        """include_geometry=False still yields a FeatureCollection."""
        geojson = self._get_service().get_layer_geojson(
            layer_id="test_layers_report",
            layer_type="report",
            include_geometry=False,
        )

        self.assertEqual(geojson["type"], "FeatureCollection")

    def test_get_data_layer_as_geojson(self):
        """A data layer renders as a FeatureCollection with layer metadata."""
        if not self.data_layer:
            self.skipTest("No data layer available (spp.area polygon field not found)")

        geojson = self._get_service().get_layer_geojson(
            layer_id=str(self.data_layer.id),
            layer_type="layer",
        )

        self.assertEqual(geojson["type"], "FeatureCollection")
        self.assertIn("features", geojson)
        self.assertIn("metadata", geojson)

        layer_info = geojson["metadata"]["layer"]
        self.assertEqual(layer_info["name"], "Test Areas Layer")
        self.assertEqual(layer_info["model"], "spp.area")

    def test_get_data_layer_invalid_id(self):
        """A non-existent numeric layer id raises MissingError."""
        with self.assertRaises(MissingError):
            self._get_service().get_layer_geojson(
                layer_id="99999",
                layer_type="layer",
            )

    def test_get_data_layer_invalid_id_format(self):
        """A non-numeric layer id raises ValueError."""
        with self.assertRaises(ValueError):
            self._get_service().get_layer_geojson(
                layer_id="not_a_number",
                layer_type="layer",
            )

    def test_invalid_layer_type(self):
        """An unknown layer_type raises ValueError with a helpful message."""
        with self.assertRaises(ValueError) as context:
            self._get_service().get_layer_geojson(
                layer_id="test_layers_report",
                layer_type="invalid_type",
            )

        self.assertIn("Invalid layer_type", str(context.exception))
        self.assertIn("Must be 'report' or 'layer'", str(context.exception))

    def test_resolve_area_codes_to_ids(self):
        """_resolve_area_codes maps known codes to database ids."""
        area_ids = self._get_service()._resolve_area_codes(["test_child_1", "test_child_2"])

        self.assertIsNotNone(area_ids)
        self.assertIn(self.child_area1.id, area_ids)
        self.assertIn(self.child_area2.id, area_ids)

    def test_resolve_area_codes_empty_list(self):
        """_resolve_area_codes returns None for an empty list."""
        self.assertIsNone(self._get_service()._resolve_area_codes([]))

    def test_resolve_area_codes_none(self):
        """_resolve_area_codes returns None for None input."""
        self.assertIsNone(self._get_service()._resolve_area_codes(None))

    def test_resolve_area_codes_nonexistent(self):
        """_resolve_area_codes returns None when no code matches."""
        self.assertIsNone(self._get_service()._resolve_area_codes(["nonexistent_code"]))

    def test_build_report_styling_with_color_scheme(self):
        """_build_report_styling exposes the configured scheme and thresholds."""
        styling = self._get_service()._build_report_styling(self.report)

        self.assertEqual(styling["geometry_type"], "polygon")
        self.assertEqual(styling["threshold_mode"], "manual")
        self.assertIsNotNone(styling["color_scheme"])
        self.assertEqual(styling["color_scheme"]["code"], "test_viridis")
        self.assertGreaterEqual(len(styling["thresholds"]), 2)

        # First threshold mirrors the fixture values.
        threshold1 = styling["thresholds"][0]
        self.assertEqual(threshold1["min_value"], 0)
        self.assertEqual(threshold1["max_value"], 10)
        self.assertEqual(threshold1["color"], "#440154")
        self.assertEqual(threshold1["label"], "Low")

    def test_build_report_styling_without_color_scheme(self):
        """A report without an explicit scheme falls back to a default one."""
        report = self.env["spp.gis.report"].create(
            {
                "name": "Test Report No Colors",
                "code": "test_no_colors",
                "source_model_id": self.area_model.id,
                "area_field_path": "area_id",
                "aggregation_method": "count",
                "base_area_level": 2,
                "normalization_method": "raw",
                "geometry_type": "point",
            }
        )

        styling = self._get_service()._build_report_styling(report)

        self.assertEqual(styling["geometry_type"], "point")
        # Default color scheme is used, never None.
        self.assertIsNotNone(styling["color_scheme"])
        self.assertIn("code", styling["color_scheme"])
        self.assertEqual(len(styling["thresholds"]), 0)

    def test_build_layer_styling(self):
        """_build_layer_styling reports geometry type and representation."""
        if not self.data_layer:
            self.skipTest("No data layer available")

        styling = self._get_service()._build_layer_styling(self.data_layer)

        self.assertIn("geometry_type", styling)
        self.assertEqual(styling["representation"], "basic")

    def test_fetch_layer_features_limit(self):
        """_fetch_layer_features caps the result at 5000 features."""
        if not self.data_layer:
            self.skipTest("No data layer available")

        features = self._get_service()._fetch_layer_features(self.data_layer, include_geometry=True)

        self.assertLessEqual(len(features), 5000)

    def test_fetch_layer_features_without_geometry(self):
        """include_geometry=False yields features with null geometry."""
        if not self.data_layer:
            self.skipTest("No data layer available")

        features = self._get_service()._fetch_layer_features(self.data_layer, include_geometry=False)

        for feature in features:
            self.assertIsNone(feature["geometry"])

    def test_color_scheme_in_styling(self):
        """The color scheme details appear in the rendered styling block."""
        geojson = self._get_service().get_layer_geojson(
            layer_id="test_layers_report",
            layer_type="report",
        )

        styling = geojson["styling"]
        self.assertIsNotNone(styling["color_scheme"])
        self.assertEqual(styling["color_scheme"]["code"], "test_viridis")
        self.assertEqual(styling["color_scheme"]["name"], "Test Viridis")
        self.assertEqual(styling["color_scheme"]["type"], "sequential")

    def test_thresholds_in_styling(self):
        """Threshold entries appear in styling with the expected shape."""
        geojson = self._get_service().get_layer_geojson(
            layer_id="test_layers_report",
            layer_type="report",
        )

        styling = geojson["styling"]
        self.assertGreaterEqual(len(styling["thresholds"]), 2)

        threshold = styling["thresholds"][0]
        for key in ("min_value", "max_value", "color", "label"):
            self.assertIn(key, threshold)

    def test_get_feature_count_with_admin_level(self):
        """get_feature_count filters by admin_level when provided."""
        # Build report data at two distinct administrative levels.
        area_type_1 = self.env["spp.area.type"].create({"name": "LS Country"})
        area_type_2 = self.env["spp.area.type"].create({"name": "LS Region"})

        area_l0 = self.env["spp.area"].create(
            {
                "draft_name": "LS Feature Count Country",
                "code": "ls_fc_country",
                "area_type_id": area_type_1.id,
            }
        )
        area_l1 = self.env["spp.area"].create(
            {
                "draft_name": "LS Feature Count Region",
                "code": "ls_fc_region",
                "parent_id": area_l0.id,
                "area_type_id": area_type_2.id,
            }
        )

        for area, raw, norm, count in (
            (area_l0, 100.0, 1.0, 100),
            (area_l1, 50.0, 0.5, 50),
        ):
            self.env["spp.gis.report.data"].create(
                {
                    "report_id": self.report.id,
                    "area_id": area.id,
                    "area_code": area.code,
                    "area_name": area.draft_name,
                    "area_level": area.area_level,
                    "raw_value": raw,
                    "normalized_value": norm,
                    "display_value": str(count),
                    "record_count": count,
                }
            )

        service = self._get_service()

        total_count = service.get_feature_count("test_layers_report", "report")
        level0_count = service.get_feature_count("test_layers_report", "report", admin_level=area_l0.area_level)
        level1_count = service.get_feature_count("test_layers_report", "report", admin_level=area_l1.area_level)

        self.assertGreater(total_count, 0)
        self.assertGreater(level0_count, 0)
        self.assertGreater(level1_count, 0)
        # Filtered counts can never exceed the unfiltered total.
        self.assertLessEqual(level0_count, total_count)
        self.assertLessEqual(level1_count, total_count)
..services.layers_service import LayersService + + service = LayersService(self.env) + count = service.get_feature_count("test_layers_report", "report") + self.assertIsInstance(count, int) + self.assertGreaterEqual(count, 0) + + +@tagged("post_install", "-at_install") +class TestBboxFeatureFilter(TransactionCase): + """Test Python-level bbox filtering of GeoJSON features.""" + + def test_filter_features_matching_bbox(self): + """Test that features inside the bbox are kept.""" + from ..services.layers_service import filter_features_by_bbox + + features = [ + { + "type": "Feature", + "id": "manila", + "properties": {"name": "Manila"}, + "geometry": { + "type": "Polygon", + "coordinates": [[[120.9, 14.5], [121.1, 14.5], [121.1, 14.7], [120.9, 14.7], [120.9, 14.5]]], + }, + }, + ] + # bbox that fully contains Manila + result = filter_features_by_bbox(features, [120.0, 14.0, 122.0, 15.0]) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["id"], "manila") + + def test_filter_features_outside_bbox(self): + """Test that features outside the bbox are excluded.""" + from ..services.layers_service import filter_features_by_bbox + + features = [ + { + "type": "Feature", + "id": "manila", + "properties": {"name": "Manila"}, + "geometry": { + "type": "Polygon", + "coordinates": [[[120.9, 14.5], [121.1, 14.5], [121.1, 14.7], [120.9, 14.7], [120.9, 14.5]]], + }, + }, + ] + # bbox far from Manila (in Europe) + result = filter_features_by_bbox(features, [0.0, 40.0, 10.0, 50.0]) + self.assertEqual(len(result), 0) + + def test_filter_features_partial_overlap(self): + """Test that features partially overlapping bbox are included.""" + from ..services.layers_service import filter_features_by_bbox + + features = [ + { + "type": "Feature", + "id": "manila", + "properties": {"name": "Manila"}, + "geometry": { + "type": "Polygon", + "coordinates": [[[120.9, 14.5], [121.1, 14.5], [121.1, 14.7], [120.9, 14.7], [120.9, 14.5]]], + }, + }, + ] + # bbox that partially overlaps 
Manila (cuts through it) + result = filter_features_by_bbox(features, [121.0, 14.0, 122.0, 15.0]) + self.assertEqual(len(result), 1) + + def test_filter_features_null_geometry_excluded(self): + """Test that features with null geometry are excluded.""" + from ..services.layers_service import filter_features_by_bbox + + features = [ + { + "type": "Feature", + "id": "no_geom", + "properties": {"name": "No Geometry"}, + "geometry": None, + }, + ] + result = filter_features_by_bbox(features, [0.0, 0.0, 180.0, 90.0]) + self.assertEqual(len(result), 0) + + def test_filter_features_mixed(self): + """Test filtering a mix of inside, outside, and null geometry features.""" + from ..services.layers_service import filter_features_by_bbox + + features = [ + { + "type": "Feature", + "id": "inside", + "properties": {}, + "geometry": { + "type": "Polygon", + "coordinates": [[[121.0, 14.5], [121.2, 14.5], [121.2, 14.7], [121.0, 14.7], [121.0, 14.5]]], + }, + }, + { + "type": "Feature", + "id": "outside", + "properties": {}, + "geometry": { + "type": "Polygon", + "coordinates": [[[1.0, 1.0], [2.0, 1.0], [2.0, 2.0], [1.0, 2.0], [1.0, 1.0]]], + }, + }, + { + "type": "Feature", + "id": "null_geom", + "properties": {}, + "geometry": None, + }, + ] + result = filter_features_by_bbox(features, [120.0, 14.0, 122.0, 15.0]) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["id"], "inside") + + def test_filter_features_multipolygon(self): + """Test bbox filtering works with MultiPolygon geometries.""" + from ..services.layers_service import filter_features_by_bbox + + features = [ + { + "type": "Feature", + "id": "multi", + "properties": {}, + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [[[121.0, 14.5], [121.2, 14.5], [121.2, 14.7], [121.0, 14.7], [121.0, 14.5]]], + [[[122.0, 15.0], [122.2, 15.0], [122.2, 15.2], [122.0, 15.2], [122.0, 15.0]]], + ], + }, + }, + ] + # bbox that contains only the first polygon + result = filter_features_by_bbox(features, [120.0, 
14.0, 121.5, 15.0]) + self.assertEqual(len(result), 1) + + def test_filter_features_empty_list(self): + """Test filtering empty feature list returns empty.""" + from ..services.layers_service import filter_features_by_bbox + + result = filter_features_by_bbox([], [0.0, 0.0, 180.0, 90.0]) + self.assertEqual(len(result), 0) + + +@tagged("post_install", "-at_install") +class TestReportGeoJSONCache(TransactionCase): + """Test report GeoJSON caching in LayersService.""" + + @classmethod + def setUpClass(cls): + """Set up test data.""" + super().setUpClass() + cls.area_model = cls.env["ir.model"].search([("model", "=", "spp.area")], limit=1) + cls.report = cls.env["spp.gis.report"].create( + { + "name": "Cache Test Report", + "code": "cache_test_report", + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "polygon", + } + ) + + def test_cache_hit_returns_same_data(self): + """Test that cached report GeoJSON returns same features.""" + from ..services.layers_service import LayersService, _report_geojson_cache + + # Clear cache before test + _report_geojson_cache.clear() + + service = LayersService(self.env) + + # First call populates cache + geojson1 = service.get_layer_geojson( + layer_id="cache_test_report", + layer_type="report", + admin_level=2, + ) + + # Second call should hit cache — verify same result + geojson2 = service.get_layer_geojson( + layer_id="cache_test_report", + layer_type="report", + admin_level=2, + ) + + self.assertEqual(len(geojson1["features"]), len(geojson2["features"])) + self.assertEqual(geojson1["type"], geojson2["type"]) + + def test_cache_populated_after_first_call(self): + """Test that cache contains entry after first call.""" + from ..services.layers_service import LayersService, _report_geojson_cache + + _report_geojson_cache.clear() + + service = LayersService(self.env) + service.get_layer_geojson( + 
layer_id="cache_test_report", + layer_type="report", + admin_level=2, + ) + + # Cache should have an entry for this report+level + self.assertGreater(len(_report_geojson_cache), 0) diff --git a/spp_api_v2_gis/tests/test_ogc_features.py b/spp_api_v2_gis/tests/test_ogc_features.py new file mode 100644 index 00000000..6b87797f --- /dev/null +++ b/spp_api_v2_gis/tests/test_ogc_features.py @@ -0,0 +1,619 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Tests for OGC API - Features service.""" + +import logging +from datetime import datetime, timedelta + +from odoo.exceptions import MissingError +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install") +class TestOGCService(TransactionCase): + """Test OGC API - Features service functionality.""" + + @classmethod + def setUpClass(cls): + """Set up test data.""" + super().setUpClass() + + # Create color scheme for reports + cls.color_scheme = cls.env["spp.gis.color.scheme"].create( + { + "name": "OGC Test Scheme", + "code": "ogc_test_scheme", + "scheme_type": "sequential", + "colors": '["#440154", "#21918c", "#fde725"]', + "default_steps": 3, + } + ) + + # Create report category + cls.category = cls.env["spp.gis.report.category"].create( + { + "name": "OGC Test Category", + "code": "ogc_test_category", + "sequence": 10, + } + ) + + # Create area model reference + cls.area_model = cls.env["ir.model"].search([("model", "=", "spp.area")], limit=1) + + # Create test reports + cls.report1 = cls.env["spp.gis.report"].create( + { + "name": "OGC Report One", + "code": "ogc_report_one", + "description": "First OGC test report", + "category_id": cls.category.id, + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "polygon", + "color_scheme_id": cls.color_scheme.id, + 
"last_refresh": datetime.now() - timedelta(hours=1), + "sequence": 10, + } + ) + + cls.report2 = cls.env["spp.gis.report"].create( + { + "name": "OGC Report Two", + "code": "ogc_report_two", + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 3, + "normalization_method": "raw", + "geometry_type": "point", + "sequence": 20, + } + ) + + # Create data layer if geo field exists + cls.geo_field = cls.env["ir.model.fields"].search( + [("model", "=", "spp.area"), ("name", "=", "polygon")], + limit=1, + ) + if cls.geo_field: + cls.data_layer = cls.env["spp.gis.data.layer"].create( + { + "name": "OGC Test Layer", + "model_name": "spp.area", + "geo_field_id": cls.geo_field.id, + "geo_repr": "basic", + "sequence": 10, + } + ) + else: + cls.data_layer = None + + # Create area types and areas at multiple levels for fan-out tests + cls.area_type_country = cls.env["spp.area.type"].create({"name": "OGC Country"}) + cls.area_type_region = cls.env["spp.area.type"].create({"name": "OGC Region"}) + + cls.area_country = cls.env["spp.area"].create( + { + "draft_name": "OGC Test Country", + "code": "ogc_test_country", + "area_type_id": cls.area_type_country.id, + } + ) + cls.area_region = cls.env["spp.area"].create( + { + "draft_name": "OGC Test Region", + "code": "ogc_test_region", + "parent_id": cls.area_country.id, + "area_type_id": cls.area_type_region.id, + } + ) + + # Create report data at levels 0 and 1 for report1 + cls.env["spp.gis.report.data"].create( + { + "report_id": cls.report1.id, + "area_id": cls.area_country.id, + "area_code": cls.area_country.code, + "area_name": cls.area_country.draft_name, + "area_level": cls.area_country.area_level, + "raw_value": 1000.0, + "normalized_value": 1.0, + "display_value": "1000", + "record_count": 1000, + } + ) + cls.env["spp.gis.report.data"].create( + { + "report_id": cls.report1.id, + "area_id": cls.area_region.id, + "area_code": cls.area_region.code, + 
"area_name": cls.area_region.draft_name, + "area_level": cls.area_region.area_level, + "raw_value": 500.0, + "normalized_value": 0.5, + "display_value": "500", + "record_count": 500, + } + ) + + # === Landing Page Tests === + + def test_landing_page_structure(self): + """Test landing page has required fields and links.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + page = service.get_landing_page() + + self.assertIn("title", page) + self.assertIn("description", page) + self.assertIn("links", page) + self.assertIsInstance(page["links"], list) + + def test_landing_page_has_required_links(self): + """Test landing page contains self, conformance, and data links.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + page = service.get_landing_page() + + link_rels = [link["rel"] for link in page["links"]] + self.assertIn("self", link_rels) + self.assertIn("conformance", link_rels) + self.assertIn("data", link_rels) + self.assertIn("service-desc", link_rels) + + # === Conformance Tests === + + def test_conformance_structure(self): + """Test conformance response has conformsTo list.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env) + conf = service.get_conformance() + + self.assertIn("conformsTo", conf) + self.assertIsInstance(conf["conformsTo"], list) + self.assertGreater(len(conf["conformsTo"]), 0) + + def test_conformance_includes_core(self): + """Test conformance declares Core conformance class.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env) + conf = service.get_conformance() + + self.assertIn( + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core", + conf["conformsTo"], + ) + + def test_conformance_includes_geojson(self): + """Test conformance declares GeoJSON conformance class.""" + from ..services.ogc_service import OGCService + + service 
= OGCService(self.env) + conf = service.get_conformance() + + self.assertIn( + "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson", + conf["conformsTo"], + ) + + # === Collections Tests === + + def test_get_collections_structure(self): + """Test collections response structure.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + self.assertIn("links", result) + self.assertIn("collections", result) + self.assertIsInstance(result["collections"], list) + + def test_get_collections_contains_reports(self): + """Test collections include GIS reports with _admN suffixes.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + collection_ids = [c["id"] for c in result["collections"]] + # Report 1 has data at two levels, should have _admN entries + report1_collections = [cid for cid in collection_ids if cid.startswith("ogc_report_one")] + self.assertGreaterEqual(len(report1_collections), 2, "Report with 2 levels should have at least 2 collections") + + def test_get_collections_contains_data_layers(self): + """Test collections include data layers.""" + if not self.data_layer: + self.skipTest("No data layer available (spp.area polygon field not found)") + + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + collection_ids = [c["id"] for c in result["collections"]] + expected_id = f"layer_{self.data_layer.id}" + self.assertIn(expected_id, collection_ids) + + def test_collection_has_required_fields(self): + """Test each collection has required OGC fields.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + for collection in 
result["collections"]: + self.assertIn("id", collection) + self.assertIn("title", collection) + self.assertIn("links", collection) + self.assertIsInstance(collection["links"], list) + + def test_report_collection_has_items_link(self): + """Test report collection has items link.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + # Find any ogc_report_one collection (with _admN suffix) + report_collection = next( + (c for c in result["collections"] if c["id"].startswith("ogc_report_one")), + None, + ) + self.assertIsNotNone(report_collection) + + link_rels = [link["rel"] for link in report_collection["links"]] + self.assertIn("items", link_rels) + self.assertIn("self", link_rels) + + def test_report_collection_has_qml_link(self): + """Test report collection has QML describedby link.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + # Find any ogc_report_one collection (with _admN suffix) + report_collection = next( + (c for c in result["collections"] if c["id"].startswith("ogc_report_one")), + None, + ) + self.assertIsNotNone(report_collection) + + link_rels = [link["rel"] for link in report_collection["links"]] + self.assertIn("describedby", link_rels) + + qml_link = next( + (link for link in report_collection["links"] if link["rel"] == "describedby"), + None, + ) + self.assertIsNotNone(qml_link) + self.assertEqual(qml_link["type"], "text/xml") + + def test_report_driven_data_layer_collection_has_qml_link(self): + """Test report-driven data layer collection has QML describedby link.""" + if not self.geo_field: + self.skipTest("No geo field available for data layer creation") + # Create a report-driven data layer + report_layer = self.env["spp.gis.data.layer"].create( + { + "name": "QML Link Test Layer", + "source_type": "report", + "report_id": 
self.report1.id, + "geo_field_id": self.geo_field.id, + "geo_repr": "choropleth", + "sequence": 99, + } + ) + + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + layer_collection_id = f"layer_{report_layer.id}" + layer_collection = next( + (c for c in result["collections"] if c["id"] == layer_collection_id), + None, + ) + self.assertIsNotNone(layer_collection, "Report-driven data layer should be in collections") + + link_rels = [link["rel"] for link in layer_collection["links"]] + self.assertIn("describedby", link_rels, "Report-driven layer should have QML link") + + qml_link = next( + (link for link in layer_collection["links"] if link["rel"] == "describedby"), + None, + ) + self.assertEqual(qml_link["type"], "text/xml") + self.assertIn("/qml", qml_link["href"]) + + def test_model_driven_data_layer_collection_has_no_qml_link(self): + """Test model-driven data layer collection does not have QML link.""" + if not self.data_layer: + self.skipTest("No data layer available") + + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + layer_collection_id = f"layer_{self.data_layer.id}" + layer_collection = next( + (c for c in result["collections"] if c["id"] == layer_collection_id), + None, + ) + self.assertIsNotNone(layer_collection) + + link_rels = [link["rel"] for link in layer_collection["links"]] + self.assertNotIn("describedby", link_rels, "Model-driven layer should NOT have QML link") + + def test_report_collection_has_temporal_extent(self): + """Test report with last_refresh has temporal extent.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + # Find any ogc_report_one collection (with _admN suffix) + report_collection = next( + (c for c in 
result["collections"] if c["id"].startswith("ogc_report_one")), + None, + ) + self.assertIsNotNone(report_collection) + self.assertIn("extent", report_collection) + self.assertIn("temporal", report_collection["extent"]) + + # === Single Collection Tests === + + def test_get_collection_by_report_code(self): + """Test getting single collection by bare report code defaults to base level.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + collection = service.get_collection("ogc_report_one") + + # Bare code defaults to base_area_level (2), which becomes _admN + self.assertIn("ogc_report_one", collection["id"]) + self.assertIn("OGC Report One", collection["title"]) + + def test_get_collection_by_layer_id(self): + """Test getting single collection by data layer ID.""" + if not self.data_layer: + self.skipTest("No data layer available") + + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + collection_id = f"layer_{self.data_layer.id}" + collection = service.get_collection(collection_id) + + self.assertEqual(collection["id"], collection_id) + self.assertEqual(collection["title"], "OGC Test Layer") + + def test_get_collection_not_found(self): + """Test getting non-existent collection raises MissingError.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + + with self.assertRaises(MissingError): + service.get_collection("nonexistent_collection") + + # === Collection Items Tests === + + def test_get_collection_items_structure(self): + """Test items response is a GeoJSON FeatureCollection.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collection_items("ogc_report_one") + + self.assertEqual(result["type"], "FeatureCollection") + self.assertIn("features", result) + 
self.assertIn("links", result) + self.assertIn("numberMatched", result) + self.assertIn("numberReturned", result) + + def test_get_collection_items_pagination(self): + """Test items pagination with limit and offset.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + + # Request with small limit + result = service.get_collection_items("ogc_report_one", limit=2, offset=0) + + self.assertLessEqual(result["numberReturned"], 2) + self.assertGreaterEqual(result["numberMatched"], 0) + + def test_get_collection_items_has_self_link(self): + """Test items response includes self link.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collection_items("ogc_report_one") + + link_rels = [link["rel"] for link in result["links"]] + self.assertIn("self", link_rels) + self.assertIn("collection", link_rels) + + def test_get_collection_items_not_found(self): + """Test items for non-existent collection raises error.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + + with self.assertRaises(MissingError): + service.get_collection_items("nonexistent_collection") + + # === Single Feature Tests === + + def test_get_collection_item_not_found(self): + """Test getting non-existent feature raises MissingError.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + + with self.assertRaises(MissingError): + service.get_collection_item("ogc_report_one", "99999999") + + # === Collection ID Parsing Tests === + + def test_parse_collection_id_report(self): + """Test parsing report code collection ID returns 3-tuple.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env) + layer_type, layer_id, admin_level = service._parse_collection_id("pop_density") + + 
self.assertEqual(layer_type, "report") + self.assertEqual(layer_id, "pop_density") + self.assertIsNone(admin_level) + + def test_parse_collection_id_layer(self): + """Test parsing layer_ prefixed collection ID returns 3-tuple.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env) + layer_type, layer_id, admin_level = service._parse_collection_id("layer_42") + + self.assertEqual(layer_type, "layer") + self.assertEqual(layer_id, "42") + self.assertIsNone(admin_level) + + def test_parse_collection_id_with_admin_level(self): + """Test parsing collection ID with _admN suffix extracts admin level.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env) + layer_type, layer_id, admin_level = service._parse_collection_id("pop_density_adm2") + + self.assertEqual(layer_type, "report") + self.assertEqual(layer_id, "pop_density") + self.assertEqual(admin_level, 2) + + def test_parse_collection_id_with_admin_level_0(self): + """Test parsing collection ID with _adm0 suffix extracts level 0.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env) + layer_type, layer_id, admin_level = service._parse_collection_id("my_report_adm0") + + self.assertEqual(layer_type, "report") + self.assertEqual(layer_id, "my_report") + self.assertEqual(admin_level, 0) + + # === Bbox Push-Down Tests === + + def test_bbox_to_geojson_helper(self): + """Test _bbox_to_geojson converts bbox array to GeoJSON Polygon.""" + from ..services.layers_service import LayersService + + service = LayersService(self.env) + result = service._bbox_to_geojson([5.0, 15.0, 15.0, 25.0]) + + self.assertEqual(result["type"], "Polygon") + coords = result["coordinates"][0] + self.assertEqual(len(coords), 5) + # First and last coordinate should be the same (closed ring) + self.assertEqual(coords[0], coords[-1]) + # Check corners: SW, SE, NE, NW, SW + self.assertEqual(coords[0], [5.0, 15.0]) + self.assertEqual(coords[1], [15.0, 15.0]) + 
self.assertEqual(coords[2], [15.0, 25.0]) + self.assertEqual(coords[3], [5.0, 25.0]) + + def test_bbox_none_does_not_filter(self): + """Test bbox=None produces same result as no bbox.""" + from ..services.layers_service import LayersService + + service = LayersService(self.env) + result_without = service.get_layer_geojson( + layer_id="ogc_report_one", + layer_type="report", + ) + result_with_none = service.get_layer_geojson( + layer_id="ogc_report_one", + layer_type="report", + bbox=None, + ) + self.assertEqual( + len(result_without.get("features", [])), + len(result_with_none.get("features", [])), + ) + + def test_ogc_items_accepts_bbox_parameter(self): + """Test OGC get_collection_items accepts bbox parameter.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + # Without bbox should work + result = service.get_collection_items("ogc_report_one") + self.assertEqual(result["type"], "FeatureCollection") + self.assertIn("features", result) + + # === Per-Level Fan-Out Tests === + + def test_get_collections_fans_out_by_level(self): + """Test that a report with data at 2 levels produces 2 collections.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + # report1 has data at area_country.area_level and area_region.area_level + report1_collections = [c for c in result["collections"] if c["id"].startswith("ogc_report_one_adm")] + self.assertGreaterEqual( + len(report1_collections), 2, "Report with data at 2 levels should produce at least 2 collections" + ) + + def test_get_collection_by_adm_suffix(self): + """Test looking up collection by {code}_admN succeeds.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + + # area_country is level 0, so ogc_report_one_adm0 should work + level = self.area_country.area_level + 
collection = service.get_collection(f"ogc_report_one_adm{level}") + + self.assertEqual(collection["id"], f"ogc_report_one_adm{level}") + self.assertIn("OGC Report One", collection["title"]) + + def test_get_collection_bare_code_defaults_to_base_level(self): + """Test bare report code defaults to base_area_level for backward compat.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + collection = service.get_collection("ogc_report_one") + + # base_area_level for report1 is 2 + self.assertEqual(collection["id"], "ogc_report_one_adm2") + + def test_collection_title_includes_level_name(self): + """Test that collection title includes the area level name.""" + from ..services.ogc_service import OGCService + + service = OGCService(self.env, "http://localhost:8069/api/v2/spp") + result = service.get_collections() + + # Find a report1 collection + report1_collection = next( + (c for c in result["collections"] if c["id"].startswith("ogc_report_one_adm")), + None, + ) + self.assertIsNotNone(report1_collection) + # Title should contain parenthetical level info + self.assertIn("(", report1_collection["title"]) + self.assertIn(")", report1_collection["title"]) diff --git a/spp_api_v2_gis/tests/test_ogc_http.py b/spp_api_v2_gis/tests/test_ogc_http.py new file mode 100644 index 00000000..a0ba4283 --- /dev/null +++ b/spp_api_v2_gis/tests/test_ogc_http.py @@ -0,0 +1,385 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""HTTP integration tests for OGC API - Features endpoints. + +Tests the actual HTTP endpoints including authentication enforcement, +status codes, content types, and parameter parsing. 
+""" + +import logging +import os +import unittest +from datetime import datetime, timedelta + +from odoo.tests import tagged + +from odoo.addons.spp_api_v2.tests.common import ApiV2HttpTestCase + +_logger = logging.getLogger(__name__) + +API_BASE = "/api/v2/spp" +OGC_BASE = f"{API_BASE}/gis/ogc" + + +@tagged("post_install", "-at_install") +@unittest.skipIf(os.getenv("SKIP_HTTP_CASE"), "Skipped via SKIP_HTTP_CASE") +class TestOGCHTTP(ApiV2HttpTestCase): + """HTTP integration tests for OGC API - Features endpoints.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + # Create GIS-specific test data + cls.color_scheme = cls.env["spp.gis.color.scheme"].create( + { + "name": "HTTP Test Scheme", + "code": "http_test_scheme", + "scheme_type": "sequential", + "colors": '["#440154", "#21918c", "#fde725"]', + "default_steps": 3, + } + ) + + cls.category = cls.env["spp.gis.report.category"].create( + { + "name": "HTTP Test Category", + "code": "http_test_category", + "sequence": 10, + } + ) + + area_model = cls.env["ir.model"].search([("model", "=", "spp.area")], limit=1) + + cls.report = cls.env["spp.gis.report"].create( + { + "name": "HTTP Test Report", + "code": "http_test_report", + "description": "Report for HTTP integration tests", + "category_id": cls.category.id, + "source_model_id": area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "polygon", + "color_scheme_id": cls.color_scheme.id, + "threshold_mode": "manual", + "last_refresh": datetime.now() - timedelta(hours=1), + "sequence": 10, + } + ) + + # Create thresholds for QML generation + cls.env["spp.gis.report.threshold"].create( + { + "report_id": cls.report.id, + "sequence": 10, + "min_value": 0, + "max_value": 50, + "color": "#440154", + "label": "Low", + } + ) + cls.env["spp.gis.report.threshold"].create( + { + "report_id": cls.report.id, + "sequence": 20, + "min_value": 50, + "max_value": 
None, + "color": "#fde725", + "label": "High", + } + ) + + # Create area types and areas at multiple levels for per-level tests + cls.area_type_country = cls.env["spp.area.type"].create({"name": "HTTP Country"}) + cls.area_type_region = cls.env["spp.area.type"].create({"name": "HTTP Region"}) + + cls.http_area_country = cls.env["spp.area"].create( + { + "draft_name": "HTTP Test Country", + "code": "http_test_country", + "area_type_id": cls.area_type_country.id, + } + ) + cls.http_area_region = cls.env["spp.area"].create( + { + "draft_name": "HTTP Test Region", + "code": "http_test_region", + "parent_id": cls.http_area_country.id, + "area_type_id": cls.area_type_region.id, + } + ) + + # Create report data at both levels + cls.env["spp.gis.report.data"].create( + { + "report_id": cls.report.id, + "area_id": cls.http_area_country.id, + "area_code": cls.http_area_country.code, + "area_name": cls.http_area_country.draft_name, + "area_level": cls.http_area_country.area_level, + "raw_value": 1000.0, + "normalized_value": 1.0, + "display_value": "1000", + "record_count": 1000, + } + ) + cls.env["spp.gis.report.data"].create( + { + "report_id": cls.report.id, + "area_id": cls.http_area_region.id, + "area_code": cls.http_area_region.code, + "area_name": cls.http_area_region.draft_name, + "area_level": cls.http_area_region.area_level, + "raw_value": 500.0, + "normalized_value": 0.5, + "display_value": "500", + "record_count": 500, + } + ) + + # Create API client with gis:read scope + cls.gis_client = cls.create_api_client( + cls, + name="GIS Test Client", + scopes=[{"resource": "gis", "action": "read"}], + ) + cls.gis_token = cls.generate_jwt_token(cls, cls.gis_client) + + # Create API client without gis scope + cls.no_gis_client = cls.create_api_client( + cls, + name="No GIS Client", + scopes=[{"resource": "individual", "action": "read"}], + ) + cls.no_gis_token = cls.generate_jwt_token(cls, cls.no_gis_client) + + def _gis_headers(self): + """Headers with valid GIS token.""" 
+ return { + "Content-Type": "application/json", + "Authorization": f"Bearer {self.gis_token}", + } + + def _no_gis_headers(self): + """Headers with token that lacks gis:read scope.""" + return { + "Content-Type": "application/json", + "Authorization": f"Bearer {self.no_gis_token}", + } + + # === Auth enforcement === + + def test_landing_page_no_token_returns_401(self): + """Test landing page without token returns 401.""" + response = self.url_open(OGC_BASE, headers={"Content-Type": "application/json"}) + self.assertEqual(response.status_code, 401) + + def test_landing_page_no_gis_scope_returns_403(self): + """Test landing page without gis:read scope returns 403.""" + response = self.url_open(OGC_BASE, headers=self._no_gis_headers()) + self.assertEqual(response.status_code, 403) + + # === Landing page === + + def test_landing_page_returns_200(self): + """Test landing page returns 200 with valid auth.""" + response = self.url_open(OGC_BASE, headers=self._gis_headers()) + self.assertEqual(response.status_code, 200) + + def test_landing_page_has_links(self): + """Test landing page response contains navigation links.""" + response = self.url_open(OGC_BASE, headers=self._gis_headers()) + data = response.json() + self.assertIn("links", data) + link_rels = [link["rel"] for link in data["links"]] + self.assertIn("self", link_rels) + self.assertIn("conformance", link_rels) + self.assertIn("data", link_rels) + + # === Conformance === + + def test_conformance_returns_200(self): + """Test conformance endpoint returns 200.""" + response = self.url_open(f"{OGC_BASE}/conformance", headers=self._gis_headers()) + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertIn("conformsTo", data) + + # === Collections === + + def test_collections_returns_200_with_reports(self): + """Test collections endpoint returns 200 and includes test report with _admN suffix.""" + response = self.url_open(f"{OGC_BASE}/collections", headers=self._gis_headers()) + 
self.assertEqual(response.status_code, 200) + data = response.json() + self.assertIn("collections", data) + collection_ids = [c["id"] for c in data["collections"]] + # Report should appear with _admN suffixes (has data at multiple levels) + report_collections = [cid for cid in collection_ids if cid.startswith("http_test_report")] + self.assertGreater(len(report_collections), 0, "Report should appear in collections") + + def test_collection_not_found_returns_404(self): + """Test non-existent collection returns 404.""" + response = self.url_open( + f"{OGC_BASE}/collections/nonexistent_collection", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 404) + + # === Items === + + def test_items_returns_geojson_content_type(self): + """Test items endpoint returns application/geo+json content type.""" + # Use bare code (backward compat defaults to base level) + response = self.url_open( + f"{OGC_BASE}/collections/http_test_report/items", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 200) + self.assertIn("application/geo+json", response.headers.get("content-type", "")) + + def test_items_invalid_bbox_returns_400(self): + """Test items with invalid bbox returns 400.""" + response = self.url_open( + f"{OGC_BASE}/collections/http_test_report/items?bbox=invalid", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 400) + + def test_items_not_found_returns_404(self): + """Test items for non-existent collection returns 404.""" + response = self.url_open( + f"{OGC_BASE}/collections/nonexistent/items", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 404) + + # === Single feature === + + def test_feature_not_found_returns_404(self): + """Test non-existent feature returns 404.""" + response = self.url_open( + f"{OGC_BASE}/collections/http_test_report/items/99999999", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 404) + + # === QML === + + def 
test_qml_returns_xml_content_type(self): + """Test QML endpoint returns text/xml content type.""" + response = self.url_open( + f"{OGC_BASE}/collections/http_test_report/qml", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 200) + self.assertIn("text/xml", response.headers.get("content-type", "")) + + def test_qml_for_nonexistent_data_layer_returns_404(self): + """Test QML for nonexistent data layer returns 404.""" + response = self.url_open( + f"{OGC_BASE}/collections/layer_999/qml", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 404) + + def test_qml_for_report_driven_data_layer_returns_200(self): + """Test QML for report-driven data layer returns 200 with XML.""" + # Create a report-driven data layer linked to the test report + geo_field = self.env["ir.model.fields"].search( + [("model", "=", "spp.area"), ("name", "=", "polygon")], + limit=1, + ) + if not geo_field: + self.skipTest("No geo field available for data layer creation") + report_layer = self.env["spp.gis.data.layer"].create( + { + "name": "HTTP Report Layer", + "source_type": "report", + "report_id": self.report.id, + "geo_field_id": geo_field.id, + "geo_repr": "choropleth", + "sequence": 99, + } + ) + response = self.url_open( + f"{OGC_BASE}/collections/layer_{report_layer.id}/qml", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 200) + self.assertIn("text/xml", response.headers.get("content-type", "")) + + def test_qml_for_model_driven_data_layer_returns_404(self): + """Test QML for model-driven data layer returns 404.""" + geo_field = self.env["ir.model.fields"].search( + [("model", "=", "spp.area"), ("name", "=", "polygon")], + limit=1, + ) + if not geo_field: + self.skipTest("No geo field available for model-driven layer") + + model_layer = self.env["spp.gis.data.layer"].create( + { + "name": "HTTP Model Layer", + "source_type": "model", + "model_name": "spp.area", + "geo_field_id": geo_field.id, + "geo_repr": 
"basic", + "sequence": 98, + } + ) + response = self.url_open( + f"{OGC_BASE}/collections/layer_{model_layer.id}/qml", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 404) + + def test_qml_for_invalid_layer_id_returns_404(self): + """Test QML for layer with invalid ID format returns 404.""" + response = self.url_open( + f"{OGC_BASE}/collections/layer_abc/qml", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 404) + + # === Admin Level Split Tests === + + def test_items_with_adm_suffix_returns_200(self): + """Test items endpoint with _admN suffix returns 200.""" + level = self.http_area_country.area_level + response = self.url_open( + f"{OGC_BASE}/collections/http_test_report_adm{level}/items", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 200) + self.assertIn("application/geo+json", response.headers.get("content-type", "")) + + def test_qml_with_adm_suffix_returns_200(self): + """Test QML endpoint with _admN suffix returns 200.""" + level = self.http_area_country.area_level + response = self.url_open( + f"{OGC_BASE}/collections/http_test_report_adm{level}/qml", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 200) + self.assertIn("text/xml", response.headers.get("content-type", "")) + + def test_bare_report_code_still_works(self): + """Test bare report code still works for backward compatibility.""" + response = self.url_open( + f"{OGC_BASE}/collections/http_test_report/items", + headers=self._gis_headers(), + ) + self.assertEqual(response.status_code, 200) + + def test_collections_list_shows_per_level_entries(self): + """Test collections list shows separate entries per admin level.""" + response = self.url_open(f"{OGC_BASE}/collections", headers=self._gis_headers()) + self.assertEqual(response.status_code, 200) + data = response.json() + collection_ids = [c["id"] for c in data["collections"]] + # Should have _admN entries for the report + adm_entries = 
[cid for cid in collection_ids if cid.startswith("http_test_report_adm")] + self.assertGreaterEqual(len(adm_entries), 2, "Report with 2 area levels should have at least 2 _admN entries") diff --git a/spp_api_v2_gis/tests/test_proximity_query.py b/spp_api_v2_gis/tests/test_proximity_query.py new file mode 100644 index 00000000..810315b2 --- /dev/null +++ b/spp_api_v2_gis/tests/test_proximity_query.py @@ -0,0 +1,352 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Tests for proximity query service. + +These tests cover validation, area fallback, metadata, and statistics. +Coordinate-based tests require spp_registrant_gis (adds coordinates +field to res.partner), which is not a direct dependency of spp_api_v2_gis. +Area fallback tests work with the base spp_gis module (provides +geo_polygon on spp.area). +""" + +import json + +from odoo.tests.common import TransactionCase + + +class TestProximityQueryValidation(TransactionCase): + """Test input validation in query_proximity().""" + + @classmethod + def setUpClass(cls): + """Set up minimal test data.""" + super().setUpClass() + cls.reference_points = [{"longitude": 28.0, "latitude": -2.0}] + + def _get_service(self): + """Create a SpatialQueryService instance.""" + from ..services.spatial_query_service import SpatialQueryService + + return SpatialQueryService(self.env) + + def test_empty_reference_points_raises(self): + """Test that empty reference points raises ValueError.""" + service = self._get_service() + + with self.assertRaises(ValueError): + service.query_proximity( + reference_points=[], + radius_km=5.0, + relation="within", + ) + + def test_zero_radius_raises(self): + """Test that zero radius raises ValueError.""" + service = self._get_service() + + with self.assertRaises(ValueError): + service.query_proximity( + reference_points=self.reference_points, + radius_km=0, + relation="within", + ) + + def test_negative_radius_raises(self): + """Test that negative radius raises 
ValueError.""" + service = self._get_service() + + with self.assertRaises(ValueError): + service.query_proximity( + reference_points=self.reference_points, + radius_km=-5.0, + relation="within", + ) + + def test_invalid_relation_raises(self): + """Test that invalid relation raises ValueError.""" + service = self._get_service() + + with self.assertRaises(ValueError): + service.query_proximity( + reference_points=self.reference_points, + radius_km=5.0, + relation="overlapping", + ) + + +class TestProximityQueryAreaFallback(TransactionCase): + """Test proximity query with area fallback. + + When spp_registrant_gis is not installed (no coordinates field on + res.partner), the service falls back to area-based proximity. + This test class sets up areas with geo_polygon data and verifies + that the area fallback path works correctly. + """ + + @classmethod + def setUpClass(cls): + """Set up test data with area polygons. + + Test geography (approximate locations in East Africa): + - Reference point (health center): lon=28.0, lat=-2.0 + - Near area: small polygon around 28.0, -2.0 (~10 km extent) + - Far area: small polygon around 32.0, -5.0 (~500 km away) + """ + super().setUpClass() + + # Create area with polygon near reference point + cls.area_near = cls.env["spp.area"].create( + { + "draft_name": "Proximity Near Area", + "code": "PROX-NEAR-001", + } + ) + near_polygon = json.dumps( + { + "type": "Polygon", + "coordinates": [[[27.95, -2.05], [28.05, -2.05], [28.05, -1.95], [27.95, -1.95], [27.95, -2.05]]], + } + ) + cls.env.cr.execute( + """ + UPDATE spp_area + SET geo_polygon = ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326) + WHERE id = %s + """, + [near_polygon, cls.area_near.id], + ) + + # Create area far from reference point + cls.area_far = cls.env["spp.area"].create( + { + "draft_name": "Proximity Far Area", + "code": "PROX-FAR-001", + } + ) + far_polygon = json.dumps( + { + "type": "Polygon", + "coordinates": [[[31.95, -5.05], [32.05, -5.05], [32.05, -4.95], [31.95, 
-4.95], [31.95, -5.05]]], + } + ) + cls.env.cr.execute( + """ + UPDATE spp_area + SET geo_polygon = ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326) + WHERE id = %s + """, + [far_polygon, cls.area_far.id], + ) + + # Partners in near area + cls.partner_near = cls.env["res.partner"].create( + { + "name": "Near Individual", + "is_registrant": True, + "is_group": False, + "area_id": cls.area_near.id, + } + ) + + cls.group_near = cls.env["res.partner"].create( + { + "name": "Near Household", + "is_registrant": True, + "is_group": True, + "area_id": cls.area_near.id, + } + ) + + # Partner in far area + cls.partner_far = cls.env["res.partner"].create( + { + "name": "Far Individual", + "is_registrant": True, + "is_group": False, + "area_id": cls.area_far.id, + } + ) + + cls.reference_points = [{"longitude": 28.0, "latitude": -2.0}] + + def _get_service(self): + """Create a SpatialQueryService instance.""" + from ..services.spatial_query_service import SpatialQueryService + + return SpatialQueryService(self.env) + + def test_within_returns_registrants_in_nearby_areas(self): + """Test 'within' via area fallback returns registrants in areas near ref points.""" + service = self._get_service() + + # 20 km radius should cover the near area polygon + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=20.0, + relation="within", + ) + + self.assertIn("total_count", result) + self.assertIn("registrant_ids", result) + self.assertGreater(result["total_count"], 0) + + # Near partner should be in the result (either via coordinates or area fallback) + self.assertIn(self.partner_near.id, result["registrant_ids"]) + + # Far partner should NOT be within 20 km + self.assertNotIn(self.partner_far.id, result["registrant_ids"]) + + def test_beyond_returns_registrants_in_far_areas(self): + """Test 'beyond' via area fallback returns registrants in areas far from ref points.""" + service = self._get_service() + + # 20 km radius: far area (~500 km away) should be beyond 
+ result = service.query_proximity( + reference_points=self.reference_points, + radius_km=20.0, + relation="beyond", + ) + + self.assertIn("total_count", result) + self.assertIn("registrant_ids", result) + + # Far partner should be beyond 20 km + self.assertIn(self.partner_far.id, result["registrant_ids"]) + + # Near partner should NOT be beyond 20 km + self.assertNotIn(self.partner_near.id, result["registrant_ids"]) + + def test_large_radius_includes_all_areas(self): + """Test that a large radius includes all areas.""" + service = self._get_service() + + # 1000 km radius should cover everything (~554 km to far area) + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=1000.0, + relation="within", + ) + + # Both near and far should be included + self.assertIn(self.partner_near.id, result["registrant_ids"]) + self.assertIn(self.partner_far.id, result["registrant_ids"]) + + def test_is_group_filter(self): + """Test that is_group filter works with area fallback.""" + service = self._get_service() + + # Filter to groups only within 20 km + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=20.0, + relation="within", + filters={"is_group": True}, + ) + + # Group should be in the result + self.assertIn(self.group_near.id, result["registrant_ids"]) + # Individual should NOT be in the result + self.assertNotIn(self.partner_near.id, result["registrant_ids"]) + + def test_is_group_false_filter(self): + """Test that is_group=False filter returns only individuals.""" + service = self._get_service() + + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=20.0, + relation="within", + filters={"is_group": False}, + ) + + # Individual should be in the result + self.assertIn(self.partner_near.id, result["registrant_ids"]) + # Group should NOT be in the result + self.assertNotIn(self.group_near.id, result["registrant_ids"]) + + def test_multiple_reference_points(self): + 
"""Test with multiple reference points covering different areas.""" + service = self._get_service() + + # Two reference points: one near each area + multi_refs = [ + {"longitude": 28.0, "latitude": -2.0}, # Near the near area + {"longitude": 32.0, "latitude": -5.0}, # Near the far area + ] + + result = service.query_proximity( + reference_points=multi_refs, + radius_km=20.0, + relation="within", + ) + + # Both should be within 20 km of at least one reference point + self.assertIn(self.partner_near.id, result["registrant_ids"]) + self.assertIn(self.partner_far.id, result["registrant_ids"]) + + def test_statistics_computed(self): + """Test that statistics are computed for matched registrants.""" + service = self._get_service() + + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=20.0, + relation="within", + variables=[], + ) + + # Statistics metadata should be present + self.assertIn("statistics", result) + self.assertIsInstance(result["statistics"], dict) + self.assertIn("access_level", result) + self.assertIn("from_cache", result) + self.assertIn("computed_at", result) + + def test_reference_points_count_in_result(self): + """Test that result includes the reference points count.""" + service = self._get_service() + + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=20.0, + relation="within", + ) + + self.assertEqual(result["reference_points_count"], 1) + + def test_radius_km_in_result(self): + """Test that result echoes back the radius.""" + service = self._get_service() + + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=12.5, + relation="within", + ) + + self.assertEqual(result["radius_km"], 12.5) + + def test_relation_in_result(self): + """Test that result echoes back the relation.""" + service = self._get_service() + + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=20.0, + relation="beyond", + ) + + 
self.assertEqual(result["relation"], "beyond") + + def test_areas_matched_count(self): + """Test that areas_matched is reported correctly.""" + service = self._get_service() + + result = service.query_proximity( + reference_points=self.reference_points, + radius_km=20.0, + relation="within", + ) + + # At least the near area should be matched + if result["query_method"] == "area_fallback": + self.assertGreater(result["areas_matched"], 0) diff --git a/spp_api_v2_gis/tests/test_qml_template_service.py b/spp_api_v2_gis/tests/test_qml_template_service.py new file mode 100644 index 00000000..e45b5ebc --- /dev/null +++ b/spp_api_v2_gis/tests/test_qml_template_service.py @@ -0,0 +1,414 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. +"""Tests for QML template service.""" + +import logging + +from odoo.tests import tagged +from odoo.tests.common import TransactionCase + +_logger = logging.getLogger(__name__) + + +@tagged("post_install", "-at_install") +class TestQMLTemplateService(TransactionCase): + """Test QML template generation.""" + + @classmethod + def setUpClass(cls): + """Set up test data.""" + super().setUpClass() + + # Create color scheme + cls.color_scheme = cls.env["spp.gis.color.scheme"].create( + { + "name": "Test Viridis", + "code": "test_viridis", + "scheme_type": "sequential", + "colors": '["#440154", "#21918c", "#fde725"]', + "default_steps": 3, + } + ) + + # Create GIS report + cls.area_model = cls.env["ir.model"].search([("model", "=", "spp.area")], limit=1) + cls.report = cls.env["spp.gis.report"].create( + { + "name": "Test QML Report", + "code": "test_qml_report", + "source_model_id": cls.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "polygon", + "color_scheme_id": cls.color_scheme.id, + "threshold_mode": "manual", + } + ) + + # Create thresholds + cls.threshold1 = cls.env["spp.gis.report.threshold"].create( + 
{ + "report_id": cls.report.id, + "sequence": 10, + "min_value": 0, + "max_value": 10, + "color": "#440154", + "label": "Low", + } + ) + cls.threshold2 = cls.env["spp.gis.report.threshold"].create( + { + "report_id": cls.report.id, + "sequence": 20, + "min_value": 10, + "max_value": 50, + "color": "#21918c", + "label": "Medium", + } + ) + cls.threshold3 = cls.env["spp.gis.report.threshold"].create( + { + "report_id": cls.report.id, + "sequence": 30, + "min_value": 50, + "max_value": None, + "color": "#fde725", + "label": "High", + } + ) + + def test_generate_graduated_polygon_qml(self): + """Test generating graduated polygon QML.""" + from ..services.qml_template_service import QMLTemplateService + + service = QMLTemplateService(self.env) + + # Generate QML + qml = service.generate_qml( + report_id=self.report.id, + geometry_type="polygon", + field_name="normalized_value", + opacity=0.7, + ) + + # Verify QML structure + self.assertIn("", qml) + self.assertIn("", qml) + self.assertIn("0.7", qml) + + # Verify ranges are present + self.assertIn('label="Low"', qml) + self.assertIn('label="Medium"', qml) + self.assertIn('label="High"', qml) + + # Verify colors are present (converted to RGB) + self.assertIn("68,1,84,255", qml) # #440154 + self.assertIn("33,145,140,255", qml) # #21918c + self.assertIn("253,231,37,255", qml) # #fde725 + + _logger.info("Generated polygon QML length: %d", len(qml)) + + def test_generate_point_qml(self): + """Test generating basic point QML.""" + from ..services.qml_template_service import QMLTemplateService + + # Update report to use point geometry + self.report.write({"geometry_type": "point"}) + + service = QMLTemplateService(self.env) + + # Generate QML + qml = service.generate_qml( + report_id=self.report.id, + geometry_type="point", + opacity=0.8, + ) + + # Verify QML structure + self.assertIn("0.8", qml) + + # Verify color from color scheme is present + # First color from scheme should be used + self.assertIn("68,1,84,255", qml) # 
#440154 + + _logger.info("Generated point QML length: %d", len(qml)) + + def test_generate_cluster_qml(self): + """Test generating clustered point QML.""" + from ..services.qml_template_service import QMLTemplateService + + # Update report to use cluster geometry + self.report.write({"geometry_type": "cluster"}) + + service = QMLTemplateService(self.env) + + # Generate QML + qml = service.generate_qml( + report_id=self.report.id, + geometry_type="cluster", + opacity=0.9, + ) + + # Verify QML structure + self.assertIn("0.9", qml) + + _logger.info("Generated cluster QML length: %d", len(qml)) + + def test_hex_to_rgb_conversion(self): + """Test hex to RGB conversion.""" + from ..services.qml_template_service import QMLTemplateService + + # Test valid hex colors + self.assertEqual(QMLTemplateService._hex_to_rgb("#440154"), "68,1,84,255") + self.assertEqual(QMLTemplateService._hex_to_rgb("#21918c"), "33,145,140,255") + self.assertEqual(QMLTemplateService._hex_to_rgb("#fde725"), "253,231,37,255") + self.assertEqual(QMLTemplateService._hex_to_rgb("#ffffff"), "255,255,255,255") + self.assertEqual(QMLTemplateService._hex_to_rgb("#000000"), "0,0,0,255") + + # Test without # prefix + self.assertEqual(QMLTemplateService._hex_to_rgb("440154"), "68,1,84,255") + + # Test invalid hex (should fallback to gray) + self.assertEqual(QMLTemplateService._hex_to_rgb("invalid"), "128,128,128,255") + self.assertEqual(QMLTemplateService._hex_to_rgb(""), "128,128,128,255") + + def test_xml_escaping(self): + """Test XML special character escaping.""" + from ..services.qml_template_service import QMLTemplateService + + self.assertEqual(QMLTemplateService._escape_xml("Test & Co"), "Test & Co") + self.assertEqual(QMLTemplateService._escape_xml(""), "<tag>") + self.assertEqual(QMLTemplateService._escape_xml('"quoted"'), ""quoted"") + self.assertEqual(QMLTemplateService._escape_xml("'single'"), "'single'") + self.assertEqual(QMLTemplateService._escape_xml("Normal text"), "Normal text") + + def 
test_invalid_report_id(self): + """Test QML generation with invalid report ID.""" + from ..services.qml_template_service import QMLTemplateService + + service = QMLTemplateService(self.env) + + # Should raise ValueError for non-existent report + with self.assertRaises(ValueError) as context: + service.generate_qml( + report_id=99999, + geometry_type="polygon", + ) + + self.assertIn("not found", str(context.exception)) + + def test_unsupported_geometry_type(self): + """Test QML generation with unsupported geometry type.""" + from ..services.qml_template_service import QMLTemplateService + + service = QMLTemplateService(self.env) + + # Should raise ValueError for unsupported geometry type + with self.assertRaises(ValueError) as context: + service.generate_qml( + report_id=self.report.id, + geometry_type="unsupported", + ) + + self.assertIn("Unsupported geometry type", str(context.exception)) + + def test_report_without_thresholds(self): + """Test QML generation for report without thresholds.""" + from ..services.qml_template_service import QMLTemplateService + + # Create report without thresholds + report = self.env["spp.gis.report"].create( + { + "name": "Test Report No Thresholds", + "code": "test_no_thresholds", + "source_model_id": self.area_model.id, + "area_field_path": "area_id", + "aggregation_method": "count", + "base_area_level": 2, + "normalization_method": "raw", + "geometry_type": "polygon", + "color_scheme_id": self.color_scheme.id, + } + ) + + service = QMLTemplateService(self.env) + + # Should generate default QML with single class + qml = service.generate_qml( + report_id=report.id, + geometry_type="polygon", + ) + + self.assertIn(" Date: Thu, 5 Mar 2026 15:51:19 +0700 Subject: [PATCH 2/6] fix(spp_api_v2_gis): set development_status to Alpha --- spp_api_v2_gis/__manifest__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spp_api_v2_gis/__manifest__.py b/spp_api_v2_gis/__manifest__.py index 4328fa08..b0ef9bc1 100644 --- 
a/spp_api_v2_gis/__manifest__.py +++ b/spp_api_v2_gis/__manifest__.py @@ -7,7 +7,7 @@ "author": "OpenSPP.org", "website": "https://github.com/OpenSPP/OpenSPP2", "license": "LGPL-3", - "development_status": "Production/Stable", + "development_status": "Alpha", "maintainers": ["jeremi", "gonzalesedwin1123", "reichie020212"], "depends": [ "spp_api_v2", From f4f16ab73d7892415e74ccae7d54061bb75a6c7d Mon Sep 17 00:00:00 2001 From: Jeremi Joslin Date: Thu, 5 Mar 2026 20:55:19 +0700 Subject: [PATCH 3/6] fix: resolve CI pre-commit and auth audit failures --- scripts/audit-api-auth.py | 2 + spp_api_v2_gis/README.rst | 154 +++++++++---- spp_api_v2_gis/__manifest__.py | 32 --- spp_api_v2_gis/models/geofence.py | 4 +- spp_api_v2_gis/readme/DESCRIPTION.md | 12 +- spp_api_v2_gis/services/layers_service.py | 6 +- .../services/qml_template_service.py | 2 +- .../services/spatial_query_service.py | 10 +- spp_api_v2_gis/static/description/index.html | 202 ++++++++++++++---- 9 files changed, 289 insertions(+), 135 deletions(-) diff --git a/scripts/audit-api-auth.py b/scripts/audit-api-auth.py index f93acbfc..7a72ee52 100755 --- a/scripts/audit-api-auth.py +++ b/scripts/audit-api-auth.py @@ -80,6 +80,8 @@ ("spp_encryption_rest_api", "well_known.py", "*"), # FastAPI demo router (development only) ("fastapi", "demo_router.py", "*"), + # OGC OPTIONS endpoint - CORS preflight, public by design + ("spp_api_v2_gis", "ogc_features.py", "options_collection_items"), } diff --git a/spp_api_v2_gis/README.rst b/spp_api_v2_gis/README.rst index c7e1a7d1..14c61f2e 100644 --- a/spp_api_v2_gis/README.rst +++ b/spp_api_v2_gis/README.rst @@ -1,12 +1,8 @@ -.. image:: https://odoo-community.org/readme-banner-image - :target: https://odoo-community.org/get-involved?utm_source=readme - :alt: Odoo Community Association - =============== OpenSPP GIS API =============== -.. +.. !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! !! This file is generated by oca-gen-addon-readme !! !! 
changes will be overwritten. !! @@ -20,24 +16,25 @@ OpenSPP GIS API .. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html :alt: License: LGPL-3 -.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2Fopenspp--modules-lightgray.png?logo=github - :target: https://github.com/OpenSPP/openspp-modules/tree/19.0/spp_api_v2_gis - :alt: OpenSPP/openspp-modules +.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2FOpenSPP2-lightgray.png?logo=github + :target: https://github.com/OpenSPP/OpenSPP2/tree/19.0/spp_api_v2_gis + :alt: OpenSPP/OpenSPP2 |badge1| |badge2| |badge3| -REST API for QGIS plugin integration, providing GeoJSON endpoints, -spatial queries, and geofence management. +REST API for QGIS plugin integration, providing OGC API - Features +endpoints, spatial queries, and geofence management. Key Features ------------ -- **Layer Catalog**: List available GIS layers and reports -- **GeoJSON Export**: Get pre-aggregated layer data for QGIS -- **QML Styling**: Fetch QGIS style files for consistent visualization -- **Spatial Queries**: Query registrant statistics within arbitrary - polygons using PostGIS -- **Geofence Management**: Save and manage areas of interest +- **OGC API - Features**: Standards-compliant feature collections + (GovStack GIS BB) +- **GeoJSON Export**: Get pre-aggregated layer data for QGIS +- **QML Styling**: Fetch QGIS style files for consistent visualization +- **Spatial Queries**: Query registrant statistics within arbitrary + polygons using PostGIS +- **Geofence Management**: Save and manage areas of interest Architecture ------------ @@ -45,42 +42,111 @@ Architecture Follows thin client architecture where QGIS displays data and OpenSPP performs all computation: -- All spatial queries executed in PostGIS for performance -- Pre-aggregated data returned to minimize data transfer -- Configuration-driven styling using QML templates -- OAuth 2.0 
authentication with scope-based access control +- All spatial queries executed in PostGIS for performance (including + bbox via ST_Intersects) +- Pre-aggregated data returned to minimize data transfer +- Configuration-driven styling using QML templates +- JWT authentication with scope-based access control API Endpoints ------------- -- ``GET /gis/catalog`` - List available layers and reports -- ``GET /gis/layers/{id}`` - Get layer as GeoJSON FeatureCollection -- ``GET /gis/layers/{id}/qml`` - Get QGIS style file -- ``POST /gis/query/statistics`` - Query statistics for polygon -- ``POST /gis/geofences`` - Create geofence -- ``GET /gis/geofences`` - List geofences -- ``GET /gis/geofences/{id}`` - Get single geofence as GeoJSON -- ``DELETE /gis/geofences/{id}`` - Archive geofence -- ``GET /gis/export/geopackage`` - Export layers for offline use - -Required Scopes ---------------- - -- ``gis:read`` - View layers and statistics -- ``gis:geofence`` - Create and manage geofences +**OGC API - Features (primary interface)** + ++-------------------------------------------+--------+------------------------------+ +| Endpoint | Method | Description | ++===========================================+========+==============================+ +| ``/gis/ogc/`` | GET | OGC API landing page | ++-------------------------------------------+--------+------------------------------+ +| ``/gis/ogc/conformance`` | GET | OGC conformance classes | ++-------------------------------------------+--------+------------------------------+ +| ``/gis/ogc/collections`` | GET | List feature collections | ++-------------------------------------------+--------+------------------------------+ +| ``/gis/ogc/collections/{id}`` | GET | Collection metadata | ++-------------------------------------------+--------+------------------------------+ +| ``/gis/ogc/collections/{id}/items`` | GET | Feature items (GeoJSON) | ++-------------------------------------------+--------+------------------------------+ +| 
``/gis/ogc/collections/{id}/items/{fid}`` | GET | Single feature | ++-------------------------------------------+--------+------------------------------+ +| ``/gis/ogc/collections/{id}/qml`` | GET | QGIS style file (extension) | ++-------------------------------------------+--------+------------------------------+ + +**Additional endpoints** + +========================== ========== ======================= +Endpoint Method Description +========================== ========== ======================= +``/gis/query/statistics`` POST Query stats for polygon +``/gis/geofences`` POST/GET Geofence management +``/gis/geofences/{id}`` GET/DELETE Single geofence +``/gis/export/geopackage`` GET Export for offline use +========================== ========== ======================= + +Scopes and Data Privacy +----------------------- + +**OAuth Scopes** + ++------------------+--------------+------------------------------------+ +| Scope | Access | Description | ++==================+==============+====================================+ +| ``gis:read`` | Read-only | View collections, layers, | +| | | statistics, export data | ++------------------+--------------+------------------------------------+ +| ``gis:geofence`` | Read + Write | Create and archive geofences (also | +| | | requires ``gis:read`` for listing) | ++------------------+--------------+------------------------------------+ + +**What data is exposed** + +**Aggregated statistics only.** No endpoint in this module returns +individual registrant records. + +- **OGC collections/items**: Return GeoJSON features organized by + administrative area, with pre-computed aggregate values (counts, + percentages). Each feature represents an *area*, not a person. +- **Spatial query statistics** (``POST /gis/query/statistics``): Accepts + a GeoJSON polygon and returns configured aggregate statistics computed + by ``spp.aggregation.service``. 
Individual registrant IDs are computed + internally for aggregation but are **explicitly stripped** from the + response before it is sent (see ``spatial_query.py``). +- **Exports** (GeoPackage/GeoJSON): Contain the same area-level + aggregated layer data, not registrant-level records. +- **Geofences**: Store only geometry and metadata — no registrant data. + +**Privacy controls** + +- **K-anonymity suppression**: Statistics backed by CEL variables can + apply k-anonymity thresholds. When a cell count falls below the + configured minimum, the value is replaced with a suppression marker + and flagged as ``"suppressed": true`` in the response. This prevents + re-identification in small populations. +- **CEL variable configuration**: Administrators control which + statistics are published and their suppression thresholds via + ``spp.statistic`` records. +- **Scope separation**: ``gis:read`` and ``gis:geofence`` are separate + scopes, allowing clients to be granted read-only access without write + capability. + +**Design rationale** + +This module follows a **thin client** architecture: QGIS (or any +OGC-compatible client) displays pre-aggregated data, while OpenSPP +retains all individual-level data server-side. This ensures that GIS API +clients — including the QGIS plugin — never need access to personally +identifiable information. Dependencies ------------ -- ``spp_api_v2`` - FastAPI infrastructure -- ``spp_gis`` - PostGIS integration -- ``spp_gis_report`` - Report configuration -- ``spp_area`` - Administrative area data +- ``spp_api_v2`` - FastAPI infrastructure +- ``spp_gis`` - PostGIS integration +- ``spp_gis_report`` - Report configuration +- ``spp_area`` - Administrative area data .. IMPORTANT:: This is an alpha version, the data model and design can change at any time without warning. Only for development or testing purpose, do not use in production. 
- `More details on development status `_ **Table of contents** @@ -90,10 +156,10 @@ Dependencies Bug Tracker =========== -Bugs are tracked on `GitHub Issues `_. +Bugs are tracked on `GitHub Issues `_. In case of trouble, please check there if your issue has already been reported. If you spotted it first, help us to smash it by providing a detailed and welcomed -`feedback `_. +`feedback `_. Do not contact contributors directly about support or help with technical issues. @@ -122,6 +188,6 @@ Current maintainers: |maintainer-jeremi| |maintainer-gonzalesedwin1123| |maintainer-reichie020212| -This module is part of the `OpenSPP/openspp-modules `_ project on GitHub. +This module is part of the `OpenSPP/OpenSPP2 `_ project on GitHub. -You are welcome to contribute. +You are welcome to contribute. \ No newline at end of file diff --git a/spp_api_v2_gis/__manifest__.py b/spp_api_v2_gis/__manifest__.py index b0ef9bc1..ffdc88ba 100644 --- a/spp_api_v2_gis/__manifest__.py +++ b/spp_api_v2_gis/__manifest__.py @@ -31,36 +31,4 @@ "summary": """ OGC API - Features compliant GIS endpoints for QGIS and GovStack GIS BB. """, - "description": """ -OpenSPP GIS API -=============== - -Extends OpenSPP API V2 with OGC API - Features compliant endpoints, -enabling GovStack GIS Building Block compliance and interoperability -with any OGC client (QGIS, ArcGIS, Leaflet, ogr2ogr, etc.). 
- -OGC API - Features Endpoints ------------------------------ -- ``GET /gis/ogc/`` - Landing page -- ``GET /gis/ogc/conformance`` - Conformance declaration -- ``GET /gis/ogc/collections`` - List feature collections -- ``GET /gis/ogc/collections/{id}`` - Collection metadata -- ``GET /gis/ogc/collections/{id}/items`` - Feature items (GeoJSON) -- ``GET /gis/ogc/collections/{id}/items/{fid}`` - Single feature -- ``GET /gis/ogc/collections/{id}/qml`` - QGIS style file (extension) - -Proprietary Endpoints ---------------------- -- ``POST /gis/query/statistics`` - Spatial statistics query -- ``CRUD /gis/geofences`` - Manage saved areas of interest -- ``GET /gis/export/geopackage`` - Export layers for offline use - -Design Principles ------------------ -- OGC API - Features Core + GeoJSON conformance -- Thin client architecture (QGIS displays, OpenSPP computes) -- Pre-aggregated data for performance -- PostGIS spatial queries -- Requires authentication via OAuth 2.0 - """, } diff --git a/spp_api_v2_gis/models/geofence.py b/spp_api_v2_gis/models/geofence.py index 681913e3..0b62bc90 100644 --- a/spp_api_v2_gis/models/geofence.py +++ b/spp_api_v2_gis/models/geofence.py @@ -136,7 +136,7 @@ def _compute_area_sqkm(self): result = self.env.cr.fetchone() rec.area_sqkm = result[0] if result else 0.0 except Exception as e: - _logger.warning("Failed to compute area for geofence %s: %s", rec.name, str(e)) + _logger.warning("Failed to compute area for geofence %s: %s", rec.id, str(e)) rec.area_sqkm = 0.0 @api.constrains("name", "active") @@ -186,7 +186,7 @@ def to_geojson(self): try: geometry_dict = mapping(self.geometry) except Exception as e: - _logger.warning("Failed to convert geometry to GeoJSON for geofence %s: %s", self.name, str(e)) + _logger.warning("Failed to convert geometry to GeoJSON for geofence %s: %s", self.id, str(e)) geometry_dict = None return { diff --git a/spp_api_v2_gis/readme/DESCRIPTION.md b/spp_api_v2_gis/readme/DESCRIPTION.md index 258533b5..5450afcd 100644 
--- a/spp_api_v2_gis/readme/DESCRIPTION.md +++ b/spp_api_v2_gis/readme/DESCRIPTION.md @@ -19,7 +19,7 @@ Follows thin client architecture where QGIS displays data and OpenSPP performs a ## API Endpoints -### OGC API - Features (primary interface) +**OGC API - Features (primary interface)** | Endpoint | Method | Description | |----------|--------|-------------| @@ -31,7 +31,7 @@ Follows thin client architecture where QGIS displays data and OpenSPP performs a | `/gis/ogc/collections/{id}/items/{fid}` | GET | Single feature | | `/gis/ogc/collections/{id}/qml` | GET | QGIS style file (extension) | -### Additional endpoints +**Additional endpoints** | Endpoint | Method | Description | |----------|--------|-------------| @@ -42,14 +42,14 @@ Follows thin client architecture where QGIS displays data and OpenSPP performs a ## Scopes and Data Privacy -### OAuth Scopes +**OAuth Scopes** | Scope | Access | Description | |-------|--------|-------------| | `gis:read` | Read-only | View collections, layers, statistics, export data | | `gis:geofence` | Read + Write | Create and archive geofences (also requires `gis:read` for listing) | -### What data is exposed +**What data is exposed** **Aggregated statistics only.** No endpoint in this module returns individual registrant records. @@ -58,13 +58,13 @@ Follows thin client architecture where QGIS displays data and OpenSPP performs a - **Exports** (GeoPackage/GeoJSON): Contain the same area-level aggregated layer data, not registrant-level records. - **Geofences**: Store only geometry and metadata — no registrant data. -### Privacy controls +**Privacy controls** - **K-anonymity suppression**: Statistics backed by CEL variables can apply k-anonymity thresholds. When a cell count falls below the configured minimum, the value is replaced with a suppression marker and flagged as `"suppressed": true` in the response. This prevents re-identification in small populations. 
- **CEL variable configuration**: Administrators control which statistics are published and their suppression thresholds via `spp.statistic` records. - **Scope separation**: `gis:read` and `gis:geofence` are separate scopes, allowing clients to be granted read-only access without write capability. -### Design rationale +**Design rationale** This module follows a **thin client** architecture: QGIS (or any OGC-compatible client) displays pre-aggregated data, while OpenSPP retains all individual-level data server-side. This ensures that GIS API clients — including the QGIS plugin — never need access to personally identifiable information. diff --git a/spp_api_v2_gis/services/layers_service.py b/spp_api_v2_gis/services/layers_service.py index 1615e043..5eadcce9 100644 --- a/spp_api_v2_gis/services/layers_service.py +++ b/spp_api_v2_gis/services/layers_service.py @@ -300,7 +300,7 @@ def _get_data_layer_geojson(self, layer_id, include_geometry=True, limit=None, o "styling": styling, } - _logger.info("Generated GeoJSON for layer: %s with %d features", layer.name, len(features)) + _logger.info("Generated GeoJSON for layer: %s with %d features", layer.id, len(features)) return geojson @@ -318,7 +318,7 @@ def _fetch_layer_features(self, layer, include_geometry, limit=None, offset=0, b list: List of GeoJSON features """ if not layer.model_name or not layer.geo_field_id: - _logger.warning("Layer %s has no model or geo field configured", layer.name) + _logger.warning("Layer %s has no model or geo field configured", layer.id) return [] Model = self.env[layer.model_name].sudo() @@ -331,7 +331,7 @@ def _fetch_layer_features(self, layer, include_geometry, limit=None, offset=0, b domain = literal_eval(layer.domain) except (ValueError, SyntaxError) as e: - _logger.warning("Invalid domain on layer %s: %s", layer.name, e) + _logger.warning("Invalid domain on layer %s: %s", layer.id, e) # Apply bbox spatial filter via PostGIS ST_Intersects if bbox: diff --git 
a/spp_api_v2_gis/services/qml_template_service.py b/spp_api_v2_gis/services/qml_template_service.py index 8bdd39ec..5e7af9f8 100644 --- a/spp_api_v2_gis/services/qml_template_service.py +++ b/spp_api_v2_gis/services/qml_template_service.py @@ -122,7 +122,7 @@ def _generate_graduated_polygon( # Get thresholds thresholds = report.threshold_ids.sorted("sequence") if not thresholds: - _logger.warning("No thresholds defined for report: %s", report.name) + _logger.warning("No thresholds defined for report: %s", report.id) # Generate default single-class QML return self._generate_default_polygon(template, field_name, opacity) diff --git a/spp_api_v2_gis/services/spatial_query_service.py b/spp_api_v2_gis/services/spatial_query_service.py index 1caf9002..3798ca42 100644 --- a/spp_api_v2_gis/services/spatial_query_service.py +++ b/spp_api_v2_gis/services/spatial_query_service.py @@ -207,7 +207,7 @@ def _query_by_coordinates(self, geometry_json, filters): p.coordinates, ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326) ) - """ + """ # nosec B608 - SQL clauses built from hardcoded fragments, data uses %s params # Add geometry parameter at the beginning params = [geometry_json] + params[1:] @@ -299,7 +299,7 @@ def _query_by_area(self, geometry_json, filters): ) ) ){extra_where} - """ + """ # nosec B608 - SQL clauses built from hardcoded fragments, data uses %s params params = [area_tuple, area_tuple] + extra_params self.env.cr.execute(registrants_query, params) @@ -623,7 +623,7 @@ def _proximity_by_coordinates(self, reference_points, radius_meters, relation, f SELECT 1 FROM _prox_ref_points r WHERE ST_Intersects(p.coordinates, r.buffer_geom) ){extra_where} - """ + """ # nosec B608 - SQL clauses built from hardcoded fragments, data uses %s params else: # "beyond": find all registrants with coords, minus those within query = f""" @@ -641,7 +641,7 @@ def _proximity_by_coordinates(self, reference_points, radius_meters, relation, f WHERE ST_Intersects(p2.coordinates, r.buffer_geom) ) 
){extra_where} - """ + """ # nosec B608 - SQL clauses built from hardcoded fragments, data uses %s params self.env.cr.execute(query, extra_params) registrant_ids = [row[0] for row in self.env.cr.fetchall()] @@ -726,7 +726,7 @@ def _proximity_by_area(self, reference_points, radius_meters, relation, filters) ) ) ){extra_where} - """ + """ # nosec B608 - SQL clauses built from hardcoded fragments, data uses %s params params = [area_tuple, area_tuple] + extra_params self.env.cr.execute(registrants_query, params) diff --git a/spp_api_v2_gis/static/description/index.html b/spp_api_v2_gis/static/description/index.html index 5f665740..5da61de2 100644 --- a/spp_api_v2_gis/static/description/index.html +++ b/spp_api_v2_gis/static/description/index.html @@ -3,7 +3,7 @@ -README.rst +OpenSPP GIS API -
+
+

OpenSPP GIS API

- - -Odoo Community Association - -
-

OpenSPP GIS API

-

Alpha License: LGPL-3 OpenSPP/openspp-modules

-

REST API for QGIS plugin integration, providing GeoJSON endpoints, -spatial queries, and geofence management.

+

Alpha License: LGPL-3 OpenSPP/OpenSPP2

+

REST API for QGIS plugin integration, providing OGC API - Features +endpoints, spatial queries, and geofence management.

-

Key Features

+

Key Features

    -
  • Layer Catalog: List available GIS layers and reports
  • +
  • OGC API - Features: Standards-compliant feature collections +(GovStack GIS BB)
  • GeoJSON Export: Get pre-aggregated layer data for QGIS
  • QML Styling: Fetch QGIS style files for consistent visualization
  • Spatial Queries: Query registrant statistics within arbitrary @@ -389,39 +385,163 @@

    Key Features

-

Architecture

+

Architecture

Follows thin client architecture where QGIS displays data and OpenSPP performs all computation:

    -
  • All spatial queries executed in PostGIS for performance
  • +
  • All spatial queries executed in PostGIS for performance (including +bbox via ST_Intersects)
  • Pre-aggregated data returned to minimize data transfer
  • Configuration-driven styling using QML templates
  • -
  • OAuth 2.0 authentication with scope-based access control
  • +
  • JWT authentication with scope-based access control
-

API Endpoints

+

API Endpoints

+

OGC API - Features (primary interface)

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
EndpointMethodDescription
/gis/ogc/GETOGC API landing page
/gis/ogc/conformanceGETOGC conformance classes
/gis/ogc/collectionsGETList feature collections
/gis/ogc/collections/{id}GETCollection metadata
/gis/ogc/collections/{id}/itemsGETFeature items (GeoJSON)
/gis/ogc/collections/{id}/items/{fid}GETSingle feature
/gis/ogc/collections/{id}/qmlGETQGIS style file (extension)
+

Additional endpoints

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
EndpointMethodDescription
/gis/query/statisticsPOSTQuery stats for polygon
/gis/geofencesPOST/GETGeofence management
/gis/geofences/{id}GET/DELETESingle geofence
/gis/export/geopackageGETExport for offline use
+
+
+

Scopes and Data Privacy

+

OAuth Scopes

+ +++++ + + + + + + + + + + + + + + + + +
ScopeAccessDescription
gis:readRead-onlyView collections, layers, +statistics, export data
gis:geofenceRead + WriteCreate and archive geofences (also +requires gis:read for listing)
+

What data is exposed

+

Aggregated statistics only. No endpoint in this module returns +individual registrant records.

    -
  • GET /gis/catalog - List available layers and reports
  • -
  • GET /gis/layers/{id} - Get layer as GeoJSON FeatureCollection
  • -
  • GET /gis/layers/{id}/qml - Get QGIS style file
  • -
  • POST /gis/query/statistics - Query statistics for polygon
  • -
  • POST /gis/geofences - Create geofence
  • -
  • GET /gis/geofences - List geofences
  • -
  • GET /gis/geofences/{id} - Get single geofence as GeoJSON
  • -
  • DELETE /gis/geofences/{id} - Archive geofence
  • -
  • GET /gis/export/geopackage - Export layers for offline use
  • +
  • OGC collections/items: Return GeoJSON features organized by +administrative area, with pre-computed aggregate values (counts, +percentages). Each feature represents an area, not a person.
  • +
  • Spatial query statistics (POST /gis/query/statistics): Accepts +a GeoJSON polygon and returns configured aggregate statistics computed +by spp.aggregation.service. Individual registrant IDs are computed +internally for aggregation but are explicitly stripped from the +response before it is sent (see spatial_query.py).
  • +
  • Exports (GeoPackage/GeoJSON): Contain the same area-level +aggregated layer data, not registrant-level records.
  • +
  • Geofences: Store only geometry and metadata — no registrant data.
-
-
-

Required Scopes

+

Privacy controls

    -
  • gis:read - View layers and statistics
  • -
  • gis:geofence - Create and manage geofences
  • +
  • K-anonymity suppression: Statistics backed by CEL variables can +apply k-anonymity thresholds. When a cell count falls below the +configured minimum, the value is replaced with a suppression marker +and flagged as "suppressed": true in the response. This prevents +re-identification in small populations.
  • +
  • CEL variable configuration: Administrators control which +statistics are published and their suppression thresholds via +spp.statistic records.
  • +
  • Scope separation: gis:read and gis:geofence are separate +scopes, allowing clients to be granted read-only access without write +capability.
+

Design rationale

+

This module follows a thin client architecture: QGIS (or any +OGC-compatible client) displays pre-aggregated data, while OpenSPP +retains all individual-level data server-side. This ensures that GIS API +clients — including the QGIS plugin — never need access to personally +identifiable information.

-

Dependencies

+

Dependencies

  • spp_api_v2 - FastAPI infrastructure
  • spp_gis - PostGIS integration
  • @@ -431,8 +551,7 @@

    Dependencies

    Important

    This is an alpha version, the data model and design can change at any time without warning. -Only for development or testing purpose, do not use in production. -More details on development status

    +Only for development or testing purpose, do not use in production.

    Table of contents

    @@ -442,31 +561,30 @@

    Dependencies

-

Bug Tracker

-

Bugs are tracked on GitHub Issues. +

Bug Tracker

+

Bugs are tracked on GitHub Issues. In case of trouble, please check there if your issue has already been reported. If you spotted it first, help us to smash it by providing a detailed and welcomed -feedback.

+feedback.

Do not contact contributors directly about support or help with technical issues.

-

Authors

+

Authors

  • OpenSPP.org
-

Maintainers

+

Maintainers

Current maintainers:

jeremi gonzalesedwin1123 reichie020212

-

This module is part of the OpenSPP/openspp-modules project on GitHub.

+

This module is part of the OpenSPP/OpenSPP2 project on GitHub.

You are welcome to contribute.

-
From e65a59325a4f71b2728b2988d62354c1a3ed7e1a Mon Sep 17 00:00:00 2001 From: Jeremi Joslin Date: Thu, 5 Mar 2026 21:29:15 +0700 Subject: [PATCH 4/6] fix: suppress remaining semgrep findings in API services --- spp_api_v2_gis/models/geofence.py | 2 +- spp_api_v2_gis/services/catalog_service.py | 6 +++--- spp_api_v2_gis/services/ogc_service.py | 2 +- spp_api_v2_gis/services/spatial_query_service.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/spp_api_v2_gis/models/geofence.py b/spp_api_v2_gis/models/geofence.py index 0b62bc90..2708bbf0 100644 --- a/spp_api_v2_gis/models/geofence.py +++ b/spp_api_v2_gis/models/geofence.py @@ -203,7 +203,7 @@ def _get_geojson_properties(self): """ self.ensure_one() - return { + return { # nosemgrep: odoo-expose-database-id "id": self.id, "name": self.name, "description": self.description or "", diff --git a/spp_api_v2_gis/services/catalog_service.py b/spp_api_v2_gis/services/catalog_service.py index 2f0c32ef..7d7e6fbd 100644 --- a/spp_api_v2_gis/services/catalog_service.py +++ b/spp_api_v2_gis/services/catalog_service.py @@ -35,7 +35,7 @@ def _get_reports(self): Returns: list[dict]: List of report info dictionaries """ - Report = self.env["spp.gis.report"].sudo() + Report = self.env["spp.gis.report"].sudo() # nosemgrep: odoo-sudo-without-context reports = Report.search([("active", "=", True)], order="sequence, name") result = [] @@ -46,7 +46,7 @@ def _get_reports(self): # Query distinct area levels that have data for this report groups = ( self.env["spp.gis.report.data"] - .sudo() + .sudo() # nosemgrep: odoo-sudo-without-context ._read_group( [("report_id", "=", report.id)], groupby=["area_level"], @@ -77,7 +77,7 @@ def _get_data_layers(self): Returns: list[dict]: List of data layer info dictionaries """ - Layer = self.env["spp.gis.data.layer"].sudo() + Layer = self.env["spp.gis.data.layer"].sudo() # nosemgrep: odoo-sudo-without-context layers = Layer.search([], order="sequence, name") result = [] diff --git 
a/spp_api_v2_gis/services/ogc_service.py b/spp_api_v2_gis/services/ogc_service.py index 7279fcff..af8ab27f 100644 --- a/spp_api_v2_gis/services/ogc_service.py +++ b/spp_api_v2_gis/services/ogc_service.py @@ -509,7 +509,7 @@ def _get_report_base_level(self, report_code): Returns: int: base_area_level or None if report not found """ - report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) + report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) # nosemgrep: odoo-sudo-without-context if report: return report.base_area_level return None diff --git a/spp_api_v2_gis/services/spatial_query_service.py b/spp_api_v2_gis/services/spatial_query_service.py index 3798ca42..bdf91b66 100644 --- a/spp_api_v2_gis/services/spatial_query_service.py +++ b/spp_api_v2_gis/services/spatial_query_service.py @@ -358,7 +358,7 @@ def _compute_via_aggregation_service(self, registrant_ids, variables): statistics_to_compute = variables if not statistics_to_compute: # Use GIS-published statistics - Statistic = self.env["spp.statistic"].sudo() + Statistic = self.env["spp.statistic"].sudo() # nosemgrep: odoo-sudo-without-context gis_stats = Statistic.get_published_for_context("gis") statistics_to_compute = [stat.name for stat in gis_stats] if gis_stats else None @@ -399,7 +399,7 @@ def _convert_aggregation_result(self, agg_result, registrant_ids=None): result = {} grouped_stats = {} - Statistic = self.env["spp.statistic"].sudo() + Statistic = self.env["spp.statistic"].sudo() # nosemgrep: odoo-sudo-without-context statistic_by_name = {stat.name: stat for stat in Statistic.search([("name", "in", list(statistics.keys()))])} for stat_name, stat_data in statistics.items(): From 7bbbe09bae6b4049c042b450c911501fac66a556 Mon Sep 17 00:00:00 2001 From: Jeremi Joslin Date: Thu, 5 Mar 2026 21:47:13 +0700 Subject: [PATCH 5/6] fix: move nosemgrep comments to first line of matched pattern --- spp_api_v2_gis/services/catalog_service.py | 4 +-- 
spp_api_v2_gis/services/layers_service.py | 30 +++++++++---------- .../services/qml_template_service.py | 2 +- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/spp_api_v2_gis/services/catalog_service.py b/spp_api_v2_gis/services/catalog_service.py index 7d7e6fbd..7ccba452 100644 --- a/spp_api_v2_gis/services/catalog_service.py +++ b/spp_api_v2_gis/services/catalog_service.py @@ -45,8 +45,8 @@ def _get_reports(self): # Query distinct area levels that have data for this report groups = ( - self.env["spp.gis.report.data"] - .sudo() # nosemgrep: odoo-sudo-without-context + self.env["spp.gis.report.data"] # nosemgrep: odoo-sudo-without-context + .sudo() ._read_group( [("report_id", "=", report.id)], groupby=["area_level"], diff --git a/spp_api_v2_gis/services/layers_service.py b/spp_api_v2_gis/services/layers_service.py index 5eadcce9..ed9d5216 100644 --- a/spp_api_v2_gis/services/layers_service.py +++ b/spp_api_v2_gis/services/layers_service.py @@ -200,7 +200,7 @@ def _get_report_geojson_uncached( Returns: dict: GeoJSON FeatureCollection with styling hints """ - Report = self.env["spp.gis.report"].sudo() + Report = self.env["spp.gis.report"].sudo() # nosemgrep: odoo-sudo-without-context report = Report.search([("code", "=", report_code)], limit=1) if not report: @@ -212,13 +212,13 @@ def _get_report_geojson_uncached( # Resolve parent area code parent_area_id = None if parent_area_code: - parent_area = self.env["spp.area"].sudo().search([("code", "=", parent_area_code)], limit=1) + parent_area = self.env["spp.area"].sudo().search([("code", "=", parent_area_code)], limit=1) # nosemgrep: odoo-sudo-without-context parent_area_id = parent_area.id if parent_area else None # Apply bbox spatial filter via PostGIS ST_Intersects if bbox: bbox_geojson = self._bbox_to_geojson(bbox) - matching_areas = self.env["spp.area"].sudo().search([("geo_polygon", "gis_intersects", bbox_geojson)]) + matching_areas = self.env["spp.area"].sudo().search([("geo_polygon", 
"gis_intersects", bbox_geojson)]) # nosemgrep: odoo-sudo-without-context if area_ids: area_ids = list(set(area_ids) & set(matching_areas.ids)) else: @@ -264,7 +264,7 @@ def _get_data_layer_geojson(self, layer_id, include_geometry=True, limit=None, o except (ValueError, TypeError) as e: raise ValueError(f"Invalid layer_id: {layer_id}") from e - Layer = self.env["spp.gis.data.layer"].sudo() + Layer = self.env["spp.gis.data.layer"].sudo() # nosemgrep: odoo-sudo-without-context layer = Layer.browse(layer_id_int) if not layer.exists(): @@ -321,7 +321,7 @@ def _fetch_layer_features(self, layer, include_geometry, limit=None, offset=0, b _logger.warning("Layer %s has no model or geo field configured", layer.id) return [] - Model = self.env[layer.model_name].sudo() + Model = self.env[layer.model_name].sudo() # nosemgrep: odoo-sudo-without-context # Build domain domain = [] @@ -408,24 +408,24 @@ def get_feature_count(self, layer_id, layer_type="report", admin_level=None): MissingError: If layer not found """ if layer_type == "report": - report = self.env["spp.gis.report"].sudo().search([("code", "=", layer_id)], limit=1) + report = self.env["spp.gis.report"].sudo().search([("code", "=", layer_id)], limit=1) # nosemgrep: odoo-sudo-without-context if not report: raise MissingError(f"Report not found: {layer_id}") domain = [("report_id", "=", report.id)] if admin_level is not None: domain.append(("area_level", "=", admin_level)) - return self.env["spp.gis.report.data"].sudo().search_count(domain) + return self.env["spp.gis.report.data"].sudo().search_count(domain) # nosemgrep: odoo-sudo-without-context elif layer_type == "layer": try: layer_id_int = int(layer_id) except (ValueError, TypeError) as e: raise ValueError(f"Invalid layer_id: {layer_id}") from e - layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) + layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) # nosemgrep: odoo-sudo-without-context if not layer.exists(): raise MissingError(f"Layer 
not found: {layer_id}") - Model = self.env[layer.model_name].sudo() + Model = self.env[layer.model_name].sudo() # nosemgrep: odoo-sudo-without-context domain = [] if layer.domain: try: @@ -472,12 +472,12 @@ def _get_report_feature_by_id(self, report_code, feature_id): Raises: MissingError: If report or feature not found """ - report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) + report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) # nosemgrep: odoo-sudo-without-context if not report: raise MissingError(f"Report not found: {report_code}") data = ( - self.env["spp.gis.report.data"] + self.env["spp.gis.report.data"] # nosemgrep: odoo-sudo-without-context .sudo() .search( [("report_id", "=", report.id), ("area_code", "=", str(feature_id))], @@ -543,7 +543,7 @@ def _get_layer_feature_by_id(self, layer_id, feature_id): except (ValueError, TypeError) as e: raise ValueError(f"Invalid layer_id: {layer_id}") from e - layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) + layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) # nosemgrep: odoo-sudo-without-context if not layer.exists(): raise MissingError(f"Layer not found: {layer_id}") @@ -552,7 +552,7 @@ def _get_layer_feature_by_id(self, layer_id, feature_id): except (ValueError, TypeError) as e: raise MissingError(f"Feature {feature_id} not found in layer {layer_id}") from e - Model = self.env[layer.model_name].sudo() + Model = self.env[layer.model_name].sudo() # nosemgrep: odoo-sudo-without-context record = Model.browse(feature_id_int) if not record.exists(): raise MissingError(f"Feature {feature_id} not found in layer {layer_id}") @@ -578,7 +578,7 @@ def _get_layer_feature_by_id(self, layer_id, feature_id): except (ImportError, Exception) as e: _logger.warning("Failed to parse geometry: %s", e) - return { + return { # nosemgrep: odoo-expose-database-id "type": "Feature", "id": record.id, "properties": properties, @@ -690,7 
+690,7 @@ def _resolve_area_codes(self, area_codes): if not area_codes: return None - areas = self.env["spp.area"].sudo().search([("code", "in", area_codes)]) + areas = self.env["spp.area"].sudo().search([("code", "in", area_codes)]) # nosemgrep: odoo-sudo-without-context return areas.ids if areas else None diff --git a/spp_api_v2_gis/services/qml_template_service.py b/spp_api_v2_gis/services/qml_template_service.py index 5e7af9f8..90186fce 100644 --- a/spp_api_v2_gis/services/qml_template_service.py +++ b/spp_api_v2_gis/services/qml_template_service.py @@ -164,7 +164,7 @@ def _compute_level_thresholds(self, report, global_thresholds, field_name, admin """ # Query the data range at this admin level data = ( - self.env["spp.gis.report.data"] + self.env["spp.gis.report.data"] # nosemgrep: odoo-sudo-without-context .sudo() .search( [ From 41bd6c6685a5aed572299d87e272448c71e3af4a Mon Sep 17 00:00:00 2001 From: Jeremi Joslin Date: Thu, 5 Mar 2026 22:01:09 +0700 Subject: [PATCH 6/6] fix: place nosemgrep comments on line before matched pattern --- spp_api_v2_gis/models/geofence.py | 3 +- spp_api_v2_gis/routers/ogc_features.py | 2 + spp_api_v2_gis/routers/statistics.py | 1 + spp_api_v2_gis/services/catalog_service.py | 9 ++-- spp_api_v2_gis/services/export_service.py | 1 + spp_api_v2_gis/services/layers_service.py | 45 ++++++++++++------- spp_api_v2_gis/services/ogc_service.py | 3 +- .../services/qml_template_service.py | 3 +- .../services/spatial_query_service.py | 6 ++- 9 files changed, 50 insertions(+), 23 deletions(-) diff --git a/spp_api_v2_gis/models/geofence.py b/spp_api_v2_gis/models/geofence.py index 2708bbf0..448b30b1 100644 --- a/spp_api_v2_gis/models/geofence.py +++ b/spp_api_v2_gis/models/geofence.py @@ -203,7 +203,8 @@ def _get_geojson_properties(self): """ self.ensure_one() - return { # nosemgrep: odoo-expose-database-id + # nosemgrep: odoo-expose-database-id + return { "id": self.id, "name": self.name, "description": self.description or "", diff --git 
a/spp_api_v2_gis/routers/ogc_features.py b/spp_api_v2_gis/routers/ogc_features.py index a1a13ab0..5bbf3bc5 100644 --- a/spp_api_v2_gis/routers/ogc_features.py +++ b/spp_api_v2_gis/routers/ogc_features.py @@ -317,6 +317,7 @@ async def get_collection_qml( status_code=status.HTTP_404_NOT_FOUND, detail="QML styles only available for report-based collections", ) + # nosemgrep: odoo-sudo-without-context layer = env["spp.gis.data.layer"].sudo().browse(layer_database_id) if not layer.exists() or not ( hasattr(layer, "source_type") and layer.source_type == "report" and layer.report_id @@ -334,6 +335,7 @@ async def get_collection_qml( if match: report_code = match.group(1) admin_level = int(match.group(2)) + # nosemgrep: odoo-sudo-without-context report = env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) if not report: raise HTTPException( diff --git a/spp_api_v2_gis/routers/statistics.py b/spp_api_v2_gis/routers/statistics.py index 607c82eb..2ffb8c82 100644 --- a/spp_api_v2_gis/routers/statistics.py +++ b/spp_api_v2_gis/routers/statistics.py @@ -45,6 +45,7 @@ async def list_statistics( ) try: + # nosemgrep: odoo-sudo-without-context Statistic = env["spp.statistic"].sudo() stats_by_category = Statistic.get_published_by_category("gis") diff --git a/spp_api_v2_gis/services/catalog_service.py b/spp_api_v2_gis/services/catalog_service.py index 7ccba452..f6c2d076 100644 --- a/spp_api_v2_gis/services/catalog_service.py +++ b/spp_api_v2_gis/services/catalog_service.py @@ -35,7 +35,8 @@ def _get_reports(self): Returns: list[dict]: List of report info dictionaries """ - Report = self.env["spp.gis.report"].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Report = self.env["spp.gis.report"].sudo() reports = Report.search([("active", "=", True)], order="sequence, name") result = [] @@ -45,7 +46,8 @@ def _get_reports(self): # Query distinct area levels that have data for this report groups = ( - 
self.env["spp.gis.report.data"] # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + self.env["spp.gis.report.data"] .sudo() ._read_group( [("report_id", "=", report.id)], @@ -77,7 +79,8 @@ def _get_data_layers(self): Returns: list[dict]: List of data layer info dictionaries """ - Layer = self.env["spp.gis.data.layer"].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Layer = self.env["spp.gis.data.layer"].sudo() layers = Layer.search([], order="sequence, name") result = [] diff --git a/spp_api_v2_gis/services/export_service.py b/spp_api_v2_gis/services/export_service.py index a8deeee3..1f58547b 100644 --- a/spp_api_v2_gis/services/export_service.py +++ b/spp_api_v2_gis/services/export_service.py @@ -97,6 +97,7 @@ def _collect_layers(self, layer_ids, admin_level): _logger.warning("Failed to collect layer %s: %s", layer_code, e) else: # Export all available reports + # nosemgrep: odoo-sudo-without-context reports = self.env["spp.gis.report"].sudo().search([("active", "=", True)]) for report in reports: try: diff --git a/spp_api_v2_gis/services/layers_service.py b/spp_api_v2_gis/services/layers_service.py index ed9d5216..ffdf0d9f 100644 --- a/spp_api_v2_gis/services/layers_service.py +++ b/spp_api_v2_gis/services/layers_service.py @@ -200,7 +200,8 @@ def _get_report_geojson_uncached( Returns: dict: GeoJSON FeatureCollection with styling hints """ - Report = self.env["spp.gis.report"].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Report = self.env["spp.gis.report"].sudo() report = Report.search([("code", "=", report_code)], limit=1) if not report: @@ -212,13 +213,15 @@ def _get_report_geojson_uncached( # Resolve parent area code parent_area_id = None if parent_area_code: - parent_area = self.env["spp.area"].sudo().search([("code", "=", parent_area_code)], limit=1) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + parent_area = 
self.env["spp.area"].sudo().search([("code", "=", parent_area_code)], limit=1) parent_area_id = parent_area.id if parent_area else None # Apply bbox spatial filter via PostGIS ST_Intersects if bbox: bbox_geojson = self._bbox_to_geojson(bbox) - matching_areas = self.env["spp.area"].sudo().search([("geo_polygon", "gis_intersects", bbox_geojson)]) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + matching_areas = self.env["spp.area"].sudo().search([("geo_polygon", "gis_intersects", bbox_geojson)]) if area_ids: area_ids = list(set(area_ids) & set(matching_areas.ids)) else: @@ -264,7 +267,8 @@ def _get_data_layer_geojson(self, layer_id, include_geometry=True, limit=None, o except (ValueError, TypeError) as e: raise ValueError(f"Invalid layer_id: {layer_id}") from e - Layer = self.env["spp.gis.data.layer"].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Layer = self.env["spp.gis.data.layer"].sudo() layer = Layer.browse(layer_id_int) if not layer.exists(): @@ -321,7 +325,8 @@ def _fetch_layer_features(self, layer, include_geometry, limit=None, offset=0, b _logger.warning("Layer %s has no model or geo field configured", layer.id) return [] - Model = self.env[layer.model_name].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Model = self.env[layer.model_name].sudo() # Build domain domain = [] @@ -408,24 +413,28 @@ def get_feature_count(self, layer_id, layer_type="report", admin_level=None): MissingError: If layer not found """ if layer_type == "report": - report = self.env["spp.gis.report"].sudo().search([("code", "=", layer_id)], limit=1) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + report = self.env["spp.gis.report"].sudo().search([("code", "=", layer_id)], limit=1) if not report: raise MissingError(f"Report not found: {layer_id}") domain = [("report_id", "=", report.id)] if admin_level is not None: 
domain.append(("area_level", "=", admin_level)) - return self.env["spp.gis.report.data"].sudo().search_count(domain) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + return self.env["spp.gis.report.data"].sudo().search_count(domain) elif layer_type == "layer": try: layer_id_int = int(layer_id) except (ValueError, TypeError) as e: raise ValueError(f"Invalid layer_id: {layer_id}") from e - layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) if not layer.exists(): raise MissingError(f"Layer not found: {layer_id}") - Model = self.env[layer.model_name].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Model = self.env[layer.model_name].sudo() domain = [] if layer.domain: try: @@ -472,12 +481,14 @@ def _get_report_feature_by_id(self, report_code, feature_id): Raises: MissingError: If report or feature not found """ - report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) if not report: raise MissingError(f"Report not found: {report_code}") data = ( - self.env["spp.gis.report.data"] # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + self.env["spp.gis.report.data"] .sudo() .search( [("report_id", "=", report.id), ("area_code", "=", str(feature_id))], @@ -543,7 +554,8 @@ def _get_layer_feature_by_id(self, layer_id, feature_id): except (ValueError, TypeError) as e: raise ValueError(f"Invalid layer_id: {layer_id}") from e - layer = self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + layer = 
self.env["spp.gis.data.layer"].sudo().browse(layer_id_int) if not layer.exists(): raise MissingError(f"Layer not found: {layer_id}") @@ -552,7 +564,8 @@ def _get_layer_feature_by_id(self, layer_id, feature_id): except (ValueError, TypeError) as e: raise MissingError(f"Feature {feature_id} not found in layer {layer_id}") from e - Model = self.env[layer.model_name].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Model = self.env[layer.model_name].sudo() record = Model.browse(feature_id_int) if not record.exists(): raise MissingError(f"Feature {feature_id} not found in layer {layer_id}") @@ -578,7 +591,8 @@ def _get_layer_feature_by_id(self, layer_id, feature_id): except (ImportError, Exception) as e: _logger.warning("Failed to parse geometry: %s", e) - return { # nosemgrep: odoo-expose-database-id + # nosemgrep: odoo-expose-database-id + return { "type": "Feature", "id": record.id, "properties": properties, @@ -690,7 +704,8 @@ def _resolve_area_codes(self, area_codes): if not area_codes: return None - areas = self.env["spp.area"].sudo().search([("code", "in", area_codes)]) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + areas = self.env["spp.area"].sudo().search([("code", "in", area_codes)]) return areas.ids if areas else None diff --git a/spp_api_v2_gis/services/ogc_service.py b/spp_api_v2_gis/services/ogc_service.py index af8ab27f..ea876115 100644 --- a/spp_api_v2_gis/services/ogc_service.py +++ b/spp_api_v2_gis/services/ogc_service.py @@ -509,7 +509,8 @@ def _get_report_base_level(self, report_code): Returns: int: base_area_level or None if report not found """ - report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + report = self.env["spp.gis.report"].sudo().search([("code", "=", report_code)], limit=1) if report: return report.base_area_level return None diff --git 
a/spp_api_v2_gis/services/qml_template_service.py b/spp_api_v2_gis/services/qml_template_service.py index 90186fce..b97e566e 100644 --- a/spp_api_v2_gis/services/qml_template_service.py +++ b/spp_api_v2_gis/services/qml_template_service.py @@ -164,7 +164,8 @@ def _compute_level_thresholds(self, report, global_thresholds, field_name, admin """ # Query the data range at this admin level data = ( - self.env["spp.gis.report.data"] # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + self.env["spp.gis.report.data"] .sudo() .search( [ diff --git a/spp_api_v2_gis/services/spatial_query_service.py b/spp_api_v2_gis/services/spatial_query_service.py index bdf91b66..a538f9bf 100644 --- a/spp_api_v2_gis/services/spatial_query_service.py +++ b/spp_api_v2_gis/services/spatial_query_service.py @@ -358,7 +358,8 @@ def _compute_via_aggregation_service(self, registrant_ids, variables): statistics_to_compute = variables if not statistics_to_compute: # Use GIS-published statistics - Statistic = self.env["spp.statistic"].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Statistic = self.env["spp.statistic"].sudo() gis_stats = Statistic.get_published_for_context("gis") statistics_to_compute = [stat.name for stat in gis_stats] if gis_stats else None @@ -399,7 +400,8 @@ def _convert_aggregation_result(self, agg_result, registrant_ids=None): result = {} grouped_stats = {} - Statistic = self.env["spp.statistic"].sudo() # nosemgrep: odoo-sudo-without-context + # nosemgrep: odoo-sudo-without-context + Statistic = self.env["spp.statistic"].sudo() statistic_by_name = {stat.name: stat for stat in Statistic.search([("name", "in", list(statistics.keys()))])} for stat_name, stat_data in statistics.items():