From 7da50f163d051498b1bb423e4d23dcce4e94abc0 Mon Sep 17 00:00:00 2001 From: luseverin Date: Mon, 8 Dec 2025 15:10:52 +0100 Subject: [PATCH 01/34] Return impactForecast object in _return_impact --- climada/engine/impact_calc.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/climada/engine/impact_calc.py b/climada/engine/impact_calc.py index 0586166173..63023ce13d 100644 --- a/climada/engine/impact_calc.py +++ b/climada/engine/impact_calc.py @@ -29,6 +29,8 @@ from climada import CONFIG from climada.engine.impact import Impact +from climada.engine.impact_forecast import ImpactForecast +from climada.hazard.forecast import HazardForecast LOGGER = logging.getLogger(__name__) @@ -217,7 +219,7 @@ def _return_impact(self, imp_mat_gen, save_mat): Returns ------- - Impact + Impact or ImpactForecast Impact Object initialize from the impact matrix See Also @@ -233,9 +235,18 @@ def _return_impact(self, imp_mat_gen, save_mat): else: imp_mat = None at_event, eai_exp, aai_agg = self.stitch_risk_metrics(imp_mat_gen) - return Impact.from_eih( + + impact = Impact.from_eih( self.exposures, self.hazard, at_event, eai_exp, aai_agg, imp_mat ) + if isinstance(self.hazard, HazardForecast): + return ImpactForecast().from_impact( + impact, self.hazard.ensemble_member, self.hazard.lead_time + ) # return ImpactForecast object + else: + return ( + impact # return normal impact object if hazard is not a HazardForecast + ) def _return_empty(self, save_mat): """ From bd3502f7751db10aa89f645683626a3687c4ed87 Mon Sep 17 00:00:00 2001 From: luseverin Date: Mon, 8 Dec 2025 15:12:48 +0100 Subject: [PATCH 02/34] Return impactForecast object in _return_empty --- climada/engine/impact_calc.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/climada/engine/impact_calc.py b/climada/engine/impact_calc.py index 63023ce13d..b680ae03e1 100644 --- a/climada/engine/impact_calc.py +++ b/climada/engine/impact_calc.py @@ -259,7 +259,7 @@ def _return_empty(self, save_mat): Returns ------- - Impact + Impact or ImpactForecast Empty impact object with correct array sizes. 
""" at_event = np.zeros(self.n_events) @@ -271,9 +271,17 @@ def _return_empty(self, save_mat): ) else: imp_mat = None - return Impact.from_eih( + impact = Impact.from_eih( self.exposures, self.hazard, at_event, eai_exp, aai_agg, imp_mat ) + if isinstance(self.hazard, HazardForecast): + return ImpactForecast().from_impact( + impact, self.hazard.ensemble_member, self.hazard.lead_time + ) # return ImpactForecast object + else: + return ( + impact # return normal impact object if hazard is not a HazardForecast + ) def minimal_exp_gdf( self, impf_col, assign_centroids, ignore_cover, ignore_deductible From dfa0198c49d36c76140a6c0ebb10cb42ccde5ff4 Mon Sep 17 00:00:00 2001 From: luseverin Date: Mon, 8 Dec 2025 16:01:00 +0100 Subject: [PATCH 03/34] Add full impactcalc test for impactForecast --- climada/engine/test/test_impact_calc.py | 88 ++++++++++++++++++++++++- 1 file changed, 87 insertions(+), 1 deletion(-) diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index bd606c6e19..e95a39355d 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -26,14 +26,17 @@ import geopandas as gpd import numpy as np +import pandas as pd from scipy import sparse from climada import CONFIG from climada.engine import Impact, ImpactCalc from climada.engine.impact_calc import LOGGER as ILOG +from climada.engine.impact_forecast import ImpactForecast from climada.entity import Exposures, ImpactFunc, ImpactFuncSet, ImpfTropCyclone from climada.entity.entity_def import Entity from climada.hazard.base import Centroids, Hazard +from climada.hazard.forecast import HazardForecast from climada.test import get_test_file from climada.util.api_client import Client from climada.util.config import Config @@ -47,7 +50,7 @@ def check_impact(self, imp, haz, exp, aai_agg, eai_exp, at_event, imp_mat_array=None): - """Test properties of imapcts""" + """Test properties of impacts""" self.assertEqual(len(haz.event_id), len(imp.at_event)) self.assertIsInstance(imp, Impact) np.testing.assert_allclose(imp.coord_exp[:, 0], exp.latitude) @@ -302,6 +305,89 @@ def test_calc_impact_RF_pass(self): # fmt: on check_impact(self, impact, haz, exp, aai_agg, eai_exp, at_event, imp_mat_array) + def test_impactForecast(self): + """Test that ImpactForecast is returned correctly""" + lead_time = pd.timedelta_range("1h", periods=6).to_numpy() + member = np.arange(6) + _haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) + haz_fc = HazardForecast.from_hazard(_haz, lead_time=lead_time, member=member) + + exp = Exposures.from_hdf5( + get_test_file("test_exposure_US_flood_random_locations") + ) + impf_set = ImpactFuncSet.from_excel( + Path(__file__).parent / "data" / "flood_imp_func_set.xls" + ) + icalc = ImpactCalc(exp, impf_set, haz_fc) + impact = icalc.impact(assign_centroids=False) + aai_agg = 161436.05112960344 + eai_exp = np.array( + [ + 1.61159701e05, + 1.33742847e02, + 0.00000000e00, + 4.21352988e-01, + 1.42185609e02, + 0.00000000e00, + 0.00000000e00, + 0.00000000e00, + ] + ) + at_event = np.array( + [ + 0.00000000e00, + 0.00000000e00, + 9.85233619e04, + 3.41245461e04, + 7.73566566e07, + 0.00000000e00, + 0.00000000e00, + ] + ) + # fmt: off + imp_mat_array = np.array( + [ + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 
0.00000000e00, 6.41965663e04, 0.00000000e00, 2.02249434e02, + 3.41245461e04, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 3.41245461e04, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 7.73566566e07, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + ] + ) + # fmt: on + check_impact( + self, impact, haz_fc, exp, aai_agg, eai_exp, at_event, imp_mat_array + ) + + # additional test to check that impact is indeed ImpactForecast + self.assertIsInstance(impact, ImpactForecast) + np.testing.assert_array_equal(impact.lead_time, lead_time) + self.assertIs(impact.lead_time.dtype, lead_time.dtype) + np.testing.assert_array_equal(impact.member, member) + def test_empty_impact(self): """Check that empty impact is returned if no centroids match the exposures""" exp = ENT.exposures.copy() From 59c0e5b4f94087d60363125f7812b17fe513835c Mon Sep 17 00:00:00 2001 From: luseverin Date: Mon, 8 Dec 2025 16:01:31 +0100 Subject: [PATCH 04/34] Correct mistakes in _return_empty and _return_impact --- climada/engine/impact_calc.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/climada/engine/impact_calc.py b/climada/engine/impact_calc.py index b680ae03e1..2e787460b9 100644 --- a/climada/engine/impact_calc.py +++ b/climada/engine/impact_calc.py @@ -240,8 +240,8 @@ def _return_impact(self, imp_mat_gen, save_mat): self.exposures, self.hazard, at_event, eai_exp, aai_agg, imp_mat ) if isinstance(self.hazard, HazardForecast): - return ImpactForecast().from_impact( - impact, self.hazard.ensemble_member, self.hazard.lead_time + return ImpactForecast.from_impact( + impact, self.hazard.lead_time, self.hazard.member ) # return ImpactForecast object else: return ( @@ -275,8 +275,8 @@ def _return_empty(self, save_mat): self.exposures, self.hazard, at_event, eai_exp, aai_agg, imp_mat ) if isinstance(self.hazard, HazardForecast): - return ImpactForecast().from_impact( - impact, self.hazard.ensemble_member, self.hazard.lead_time + return ImpactForecast.from_impact( + impact, self.hazard.lead_time, self.hazard.member ) # return ImpactForecast object else: return ( From 4b5ae95a51b9f91efd14a9d6b1b3833d1960b37b Mon Sep 17 00:00:00 2001 From: luseverin Date: Mon, 8 Dec 2025 16:29:26 +0100 Subject: [PATCH 05/34] Raise value error when computing impact with impact forecast without saving impact matrix --- climada/engine/impact_calc.py | 8 ++++++++ climada/engine/test/test_impact_calc.py | 24 ++++++++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/climada/engine/impact_calc.py b/climada/engine/impact_calc.py index 2e787460b9..c09fde4aec 100644 --- a/climada/engine/impact_calc.py +++ b/climada/engine/impact_calc.py @@ -233,6 +233,10 @@ def _return_impact(self, imp_mat_gen, save_mat): imp_mat, self.hazard.frequency ) else: + if isinstance(self.hazard, HazardForecast): + raise ValueError( + "Saving impact matrix is required when using HazardForecast." 
+ ) imp_mat = None at_event, eai_exp, aai_agg = self.stitch_risk_metrics(imp_mat_gen) @@ -270,6 +274,10 @@ def _return_empty(self, save_mat): (self.n_events, self.n_exp_pnt), dtype=np.float64 ) else: + if isinstance(self.hazard, HazardForecast): + raise ValueError( + "Saving impact matrix is required when using HazardForecast." + ) imp_mat = None impact = Impact.from_eih( self.exposures, self.hazard, at_event, eai_exp, aai_agg, imp_mat diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index e95a39355d..7c26e55b4a 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -388,6 +388,30 @@ def test_impactForecast(self): self.assertIs(impact.lead_time.dtype, lead_time.dtype) np.testing.assert_array_equal(impact.member, member) + def test_impact_forecast_empty_impmat_error(self): + """Test that error is raised when trying to compute impact forecast + without saving impact matrix + """ + lead_time = pd.timedelta_range("1h", periods=6).to_numpy() + member = np.arange(6) + _haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) + haz_fc = HazardForecast.from_hazard(_haz, lead_time=lead_time, member=member) + + exp = Exposures.from_hdf5( + get_test_file("test_exposure_US_flood_random_locations") + ) + impf_set = ImpactFuncSet.from_excel( + Path(__file__).parent / "data" / "flood_imp_func_set.xls" + ) + icalc = ImpactCalc(exp, impf_set, haz_fc) + with self.assertRaises(ValueError) as cm: + icalc.impact(assign_centroids=False, save_mat=False) + the_exception = cm.exception + self.assertEqual( + the_exception.args[0], + "Saving impact matrix is required when using HazardForecast.", + ) + def test_empty_impact(self): """Check that empty impact is returned if no centroids match the exposures""" exp = ENT.exposures.copy() From 9a516e1616eed05af3eacb15b5ae43fd4fd026ef Mon Sep 17 00:00:00 2001 From: Chahan Kropf Date: Mon, 8 Dec 2025 17:24:12 +0100 Subject: [PATCH 06/34] Cosmetics: Improve error message, move test to own class --- climada/engine/impact_calc.py | 14 +- climada/engine/test/test_impact_calc.py | 220 ++++++++++++------------ 2 files changed, 119 insertions(+), 115 deletions(-) diff --git a/climada/engine/impact_calc.py b/climada/engine/impact_calc.py index c09fde4aec..2a5f3e35be 100644 --- a/climada/engine/impact_calc.py +++ b/climada/engine/impact_calc.py @@ -236,6 +236,7 @@ def _return_impact(self, imp_mat_gen, save_mat): if isinstance(self.hazard, HazardForecast): raise ValueError( "Saving impact matrix is required when using HazardForecast." + "Please set save_mat=True." ) imp_mat = None at_event, eai_exp, aai_agg = self.stitch_risk_metrics(imp_mat_gen) @@ -246,11 +247,9 @@ def _return_impact(self, imp_mat_gen, save_mat): if isinstance(self.hazard, HazardForecast): return ImpactForecast.from_impact( impact, self.hazard.lead_time, self.hazard.member - ) # return ImpactForecast object - else: - return ( - impact # return normal impact object if hazard is not a HazardForecast ) + else: + return impact def _return_empty(self, save_mat): """ @@ -277,6 +276,7 @@ def _return_empty(self, save_mat): if isinstance(self.hazard, HazardForecast): raise ValueError( "Saving impact matrix is required when using HazardForecast." + "Please set save_mat=True." 
) imp_mat = None impact = Impact.from_eih( @@ -285,11 +285,9 @@ def _return_empty(self, save_mat): if isinstance(self.hazard, HazardForecast): return ImpactForecast.from_impact( impact, self.hazard.lead_time, self.hazard.member - ) # return ImpactForecast object - else: - return ( - impact # return normal impact object if hazard is not a HazardForecast ) + else: + return impact def minimal_exp_gdf( self, impf_col, assign_centroids, ignore_cover, ignore_deductible diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index 7c26e55b4a..c97f2a1f9d 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -305,113 +305,6 @@ def test_calc_impact_RF_pass(self): # fmt: on check_impact(self, impact, haz, exp, aai_agg, eai_exp, at_event, imp_mat_array) - def test_impactForecast(self): - """Test that ImpactForecast is returned correctly""" - lead_time = pd.timedelta_range("1h", periods=6).to_numpy() - member = np.arange(6) - _haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) - haz_fc = HazardForecast.from_hazard(_haz, lead_time=lead_time, member=member) - - exp = Exposures.from_hdf5( - get_test_file("test_exposure_US_flood_random_locations") - ) - impf_set = ImpactFuncSet.from_excel( - Path(__file__).parent / "data" / "flood_imp_func_set.xls" - ) - icalc = ImpactCalc(exp, impf_set, haz_fc) - impact = icalc.impact(assign_centroids=False) - aai_agg = 161436.05112960344 - eai_exp = np.array( - [ - 1.61159701e05, - 1.33742847e02, - 0.00000000e00, - 4.21352988e-01, - 1.42185609e02, - 0.00000000e00, - 0.00000000e00, - 0.00000000e00, - ] - ) - at_event = np.array( - [ - 0.00000000e00, - 0.00000000e00, - 9.85233619e04, - 3.41245461e04, - 7.73566566e07, - 0.00000000e00, - 0.00000000e00, - ] - ) - # fmt: off - imp_mat_array = np.array( - [ - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 6.41965663e04, 0.00000000e00, 2.02249434e02, - 3.41245461e04, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 3.41245461e04, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 7.73566566e07, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - ] - ) - # fmt: on - check_impact( - self, impact, haz_fc, exp, aai_agg, eai_exp, at_event, imp_mat_array - ) - - # additional test to check that impact is indeed ImpactForecast - self.assertIsInstance(impact, ImpactForecast) - np.testing.assert_array_equal(impact.lead_time, lead_time) - self.assertIs(impact.lead_time.dtype, lead_time.dtype) - np.testing.assert_array_equal(impact.member, member) - - def test_impact_forecast_empty_impmat_error(self): - """Test that error is raised when trying to compute impact forecast - without saving impact matrix - """ - lead_time = pd.timedelta_range("1h", periods=6).to_numpy() - member = np.arange(6) - _haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) - haz_fc = 
HazardForecast.from_hazard(_haz, lead_time=lead_time, member=member) - - exp = Exposures.from_hdf5( - get_test_file("test_exposure_US_flood_random_locations") - ) - impf_set = ImpactFuncSet.from_excel( - Path(__file__).parent / "data" / "flood_imp_func_set.xls" - ) - icalc = ImpactCalc(exp, impf_set, haz_fc) - with self.assertRaises(ValueError) as cm: - icalc.impact(assign_centroids=False, save_mat=False) - the_exception = cm.exception - self.assertEqual( - the_exception.args[0], - "Saving impact matrix is required when using HazardForecast.", - ) - def test_empty_impact(self): """Check that empty impact is returned if no centroids match the exposures""" exp = ENT.exposures.copy() @@ -716,6 +609,118 @@ def test_single_exp_zero_mdr(self): check_impact(self, imp, haz, exp, aai_agg, eai_exp, at_event, at_event) +class TestImpactCalcForecast(unittest.TestCase): + """Test impact calc for forecast hazard""" + + def test_impactForecast(self): + """Test that ImpactForecast is returned correctly""" + lead_time = pd.timedelta_range("1h", periods=6).to_numpy() + member = np.arange(6) + _haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) + haz_fc = HazardForecast.from_hazard(_haz, lead_time=lead_time, member=member) + + exp = Exposures.from_hdf5( + get_test_file("test_exposure_US_flood_random_locations") + ) + impf_set = ImpactFuncSet.from_excel( + Path(__file__).parent / "data" / "flood_imp_func_set.xls" + ) + icalc = ImpactCalc(exp, impf_set, haz_fc) + impact = icalc.impact(assign_centroids=False) + aai_agg = 161436.05112960344 + eai_exp = np.array( + [ + 1.61159701e05, + 1.33742847e02, + 0.00000000e00, + 4.21352988e-01, + 1.42185609e02, + 0.00000000e00, + 0.00000000e00, + 0.00000000e00, + ] + ) + at_event = np.array( + [ + 0.00000000e00, + 0.00000000e00, + 9.85233619e04, + 3.41245461e04, + 7.73566566e07, + 0.00000000e00, + 0.00000000e00, + ] + ) + # fmt: off + imp_mat_array = np.array( + [ + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 6.41965663e04, 0.00000000e00, 2.02249434e02, + 3.41245461e04, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 3.41245461e04, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 7.73566566e07, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + [ + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, + ], + ] + ) + # fmt: on + check_impact( + self, impact, haz_fc, exp, aai_agg, eai_exp, at_event, imp_mat_array + ) + + # additional test to check that impact is indeed ImpactForecast + self.assertIsInstance(impact, ImpactForecast) + np.testing.assert_array_equal(impact.lead_time, lead_time) + self.assertIs(impact.lead_time.dtype, lead_time.dtype) + np.testing.assert_array_equal(impact.member, member) + + def test_impact_forecast_empty_impmat_error(self): + """Test that error is raised when trying to compute impact forecast + without saving impact matrix + """ + lead_time = pd.timedelta_range("1h", periods=6).to_numpy() + member = np.arange(6) + _haz = 
Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) + haz_fc = HazardForecast.from_hazard(_haz, lead_time=lead_time, member=member) + + exp = Exposures.from_hdf5( + get_test_file("test_exposure_US_flood_random_locations") + ) + impf_set = ImpactFuncSet.from_excel( + Path(__file__).parent / "data" / "flood_imp_func_set.xls" + ) + icalc = ImpactCalc(exp, impf_set, haz_fc) + with self.assertRaises(ValueError) as cm: + icalc.impact(assign_centroids=False, save_mat=False) + no_impmat_exception = cm.exception + self.assertEqual( + no_impmat_exception.args[0], + "Saving impact matrix is required when using HazardForecast." + "Please set save_mat=True.", + ) + + class TestImpactMatrixCalc(unittest.TestCase): """Verify the computation of the impact matrix""" @@ -1011,6 +1016,7 @@ def test_skip_mat(self, from_eih_mock): # Execute Tests if __name__ == "__main__": TESTS = unittest.TestLoader().loadTestsFromTestCase(TestImpactCalc) + TESTS.addTests(unittest.TestLoader().loadTestsFromTestCase(TestImpactCalcForecast)) TESTS.addTests(unittest.TestLoader().loadTestsFromTestCase(TestReturnImpact)) TESTS.addTests(unittest.TestLoader().loadTestsFromTestCase(TestImpactMatrix)) TESTS.addTests(unittest.TestLoader().loadTestsFromTestCase(TestImpactMatrixCalc)) From 899d8f01c09c66eadeba7cf0249dd430dc66c551 Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 11:29:35 +0100 Subject: [PATCH 07/34] add test to check that eai_exp and aai_agg are nan for forecasts --- climada/engine/test/test_impact_calc.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index c97f2a1f9d..4ed45c17aa 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -720,6 +720,26 @@ def test_impact_forecast_empty_impmat_error(self): "Please set save_mat=True.", ) + def test_impact_forecast_blocked_nonsense_attrs(self): + """Test that nonsense attributes are blocked when computing impact forecast""" + lead_time = pd.timedelta_range("1h", periods=6).to_numpy() + member = np.arange(6) + haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) + haz_fc = HazardForecast.from_hazard(haz, lead_time=lead_time, member=member) + + exp = Exposures.from_hdf5( + get_test_file("test_exposure_US_flood_random_locations") + ) + impf_set = ImpactFuncSet.from_excel( + Path(__file__).parent / "data" / "flood_imp_func_set.xls" + ) + impact = ImpactCalc(exp, impf_set, haz_fc).impact( + assign_centroids=False, save_mat=True + ) + assert np.isnan(impact.aai_agg) + assert np.all(np.isnan(impact.eai_exp)) + assert impact.eai_exp.shape == (len(exp.gdf),) + class TestImpactMatrixCalc(unittest.TestCase): """Verify the computation of the impact matrix""" From db32170e2d54b7ea9dba9a310344ff4997a086af Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 11:30:10 +0100 Subject: [PATCH 08/34] Write nans for eai_exp and aai_agg when forecast is used --- climada/engine/impact_calc.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/climada/engine/impact_calc.py b/climada/engine/impact_calc.py index 2a5f3e35be..9e524a4d03 100644 --- a/climada/engine/impact_calc.py +++ b/climada/engine/impact_calc.py @@ -232,6 +232,14 @@ def _return_impact(self, imp_mat_gen, save_mat): at_event, eai_exp, aai_agg = self.risk_metrics( imp_mat, self.hazard.frequency ) + if isinstance(self.hazard, HazardForecast): + eai_exp = np.nan * np.ones(eai_exp.shape, dtype=eai_exp.dtype) + 
aai_agg = np.nan * np.ones(aai_agg.shape, dtype=aai_agg.dtype) + LOGGER.warning( + "eai_exp and aai_agg are undefined with forecasts. " + "Setting them to empty arrays." + ) + else: if isinstance(self.hazard, HazardForecast): raise ValueError( From d2f035f4a870cbdf4eb5fbbc2776779bc6ead20a Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 12:17:02 +0100 Subject: [PATCH 09/34] update tests using pytest --- climada/engine/test/test_impact_calc.py | 217 ++++++++++++------------ 1 file changed, 110 insertions(+), 107 deletions(-) diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index 4ed45c17aa..5957925e47 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -27,6 +27,7 @@ import geopandas as gpd import numpy as np import pandas as pd +import pytest from scipy import sparse from climada import CONFIG @@ -49,6 +50,88 @@ DATA_FOLDER.mkdir(exist_ok=True) +@pytest.fixture(autouse=True) +def exposure_fixture(): + n_exp = 50 + lats = np.linspace(-10, 10, n_exp) + lons = np.linspace(-10, 10, n_exp) + data = gpd.GeoDataFrame( + { + "impf_TC": 1, + "value": 1, + }, + index=range(n_exp), + geometry=gpd.points_from_xy(lons, lats), + crs="EPSG:4326", + ) + exposures = Exposures(data=data) + return exposures + + +@pytest.fixture(autouse=True) +def hazard_fixture(exposure_fixture): + n_events = 10 + centroids = Centroids( + lat=exposure_fixture.gdf.geometry.x, + lon=exposure_fixture.gdf.geometry.y, + ) + intensity = ( + np.ones((n_events, exposure_fixture.gdf.shape[0])) * 50 + ) # uniform intensity + haz = Hazard() + haz.haz_type = "TC" + haz.centroids = centroids + haz.intensity = intensity + haz.frequency = 1 / 10 * np.ones(n_events) # uniform frequency (10 n_events) + return haz + + +@pytest.fixture(autouse=True) +def hazard_forecast_fixture(hazard_fixture): + n_events = hazard_fixture.size + lead_time = pd.timedelta_range("1h", periods=n_events).to_numpy() + member = np.arange(10) + haz_fc = HazardForecast.from_hazard( + hazard=hazard_fixture, + lead_time=lead_time, + member=member, + ) + return haz_fc + + +@pytest.fixture(autouse=True) +def impact_func_set_fixture(exposure_fixture, hazard_fixture): + step_impf = ImpactFunc() + step_impf.id = exposure_fixture.data[f"impf_{hazard_fixture.haz_type}"].unique()[0] + step_impf.haz_type = hazard_fixture.haz_type + step_impf.name = "fixture step function" + step_impf.intensity_unit = "" + step_impf.intensity = np.array([0, 0.495, 0.4955, 0.5, 1, 10]) + step_impf.mdd = np.array([0, 0, 0, 1, 1, 1]) + step_impf.paa = np.sort(np.linspace(1, 1, num=6)) + return ImpactFuncSet([step_impf]) + + +@pytest.fixture(autouse=True) +def impact_calc_fixture(exposure_fixture, hazard_fixture, impact_func_set_fixture): + imp_mat = np.ones( + ( + len(hazard_fixture.event_id), + exposure_fixture.gdf.shape[0], + exposure_fixture.gdf.shape[0], + ) + ) + aai_agg = np.sum(exposure_fixture.gdf["value"]) * hazard_fixture.frequency[0] + eai_exp = np.ones(exposure_fixture.gdf.shape[0]) * hazard_fixture.frequency[0] + at_event = np.ones(hazard_fixture.size) * np.sum(exposure_fixture.gdf["value"]) + return { + "imp_mat": imp_mat, + "aai_agg": aai_agg, + "eai_exp": eai_exp, + "at_event": at_event, + } + + def check_impact(self, imp, haz, exp, aai_agg, eai_exp, at_event, imp_mat_array=None): """Test properties of impacts""" self.assertEqual(len(haz.event_id), len(imp.at_event)) @@ -609,108 +692,34 @@ def test_single_exp_zero_mdr(self): check_impact(self, imp, haz, exp, aai_agg, eai_exp, 
at_event, at_event) -class TestImpactCalcForecast(unittest.TestCase): +class TestImpactCalcForecast: """Test impact calc for forecast hazard""" - def test_impactForecast(self): + def test_impactForecast_type( + exposure_fixture, + hazard_forecast_fixture, + impact_func_set_fixture, + impact_calc_fixture, + ): """Test that ImpactForecast is returned correctly""" - lead_time = pd.timedelta_range("1h", periods=6).to_numpy() - member = np.arange(6) - _haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) - haz_fc = HazardForecast.from_hazard(_haz, lead_time=lead_time, member=member) - exp = Exposures.from_hdf5( - get_test_file("test_exposure_US_flood_random_locations") - ) - impf_set = ImpactFuncSet.from_excel( - Path(__file__).parent / "data" / "flood_imp_func_set.xls" - ) - icalc = ImpactCalc(exp, impf_set, haz_fc) - impact = icalc.impact(assign_centroids=False) - aai_agg = 161436.05112960344 - eai_exp = np.array( - [ - 1.61159701e05, - 1.33742847e02, - 0.00000000e00, - 4.21352988e-01, - 1.42185609e02, - 0.00000000e00, - 0.00000000e00, - 0.00000000e00, - ] - ) - at_event = np.array( - [ - 0.00000000e00, - 0.00000000e00, - 9.85233619e04, - 3.41245461e04, - 7.73566566e07, - 0.00000000e00, - 0.00000000e00, - ] - ) - # fmt: off - imp_mat_array = np.array( - [ - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 6.41965663e04, 0.00000000e00, 2.02249434e02, - 3.41245461e04, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 3.41245461e04, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 7.73566566e07, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - [ - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - 0.00000000e00, 0.00000000e00, 0.00000000e00, 0.00000000e00, - ], - ] - ) - # fmt: on - check_impact( - self, impact, haz_fc, exp, aai_agg, eai_exp, at_event, imp_mat_array + # check that impact is indeed ImpactForecast + assert isinstance(impact, ImpactForecast) + np.testing.assert_array_equal( + impact.lead_time, hazard_forecast_fixture.lead_time ) + assert impact.lead_time.dtype == hazard_forecast_fixture.lead_time.dtype + np.testing.assert_array_equal(impact.member, hazard_forecast_fixture.member) - # additional test to check that impact is indeed ImpactForecast - self.assertIsInstance(impact, ImpactForecast) - np.testing.assert_array_equal(impact.lead_time, lead_time) - self.assertIs(impact.lead_time.dtype, lead_time.dtype) - np.testing.assert_array_equal(impact.member, member) - - def test_impact_forecast_empty_impmat_error(self): + def test_impact_forecast_empty_impmat_error( + hazard_forecast_fixture, exposure_fixture, impact_func_set_fixture + ): """Test that error is raised when trying to compute impact forecast without saving impact matrix """ - lead_time = pd.timedelta_range("1h", periods=6).to_numpy() - member = np.arange(6) - _haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) - haz_fc = HazardForecast.from_hazard(_haz, lead_time=lead_time, member=member) - - exp = Exposures.from_hdf5( - 
get_test_file("test_exposure_US_flood_random_locations") - ) - impf_set = ImpactFuncSet.from_excel( - Path(__file__).parent / "data" / "flood_imp_func_set.xls" + icalc = ImpactCalc( + exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture ) - icalc = ImpactCalc(exp, impf_set, haz_fc) with self.assertRaises(ValueError) as cm: icalc.impact(assign_centroids=False, save_mat=False) no_impmat_exception = cm.exception @@ -720,25 +729,19 @@ def test_impact_forecast_empty_impmat_error(self): "Please set save_mat=True.", ) - def test_impact_forecast_blocked_nonsense_attrs(self): + def test_impact_forecast_blocked_nonsense_attrs( + hazard_forecast_fixture, exposure_fixture, impact_func_set_fixture + ): """Test that nonsense attributes are blocked when computing impact forecast""" - lead_time = pd.timedelta_range("1h", periods=6).to_numpy() - member = np.arange(6) - haz = Hazard.from_hdf5(get_test_file("test_hazard_US_flood_random_locations")) - haz_fc = HazardForecast.from_hazard(haz, lead_time=lead_time, member=member) + lead_time = hazard_fixture.lead_time + member = hazard_fixture.member - exp = Exposures.from_hdf5( - get_test_file("test_exposure_US_flood_random_locations") - ) - impf_set = ImpactFuncSet.from_excel( - Path(__file__).parent / "data" / "flood_imp_func_set.xls" - ) - impact = ImpactCalc(exp, impf_set, haz_fc).impact( - assign_centroids=False, save_mat=True - ) + impact = ImpactCalc( + exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture + ).impact(assign_centroids=True, save_mat=True) assert np.isnan(impact.aai_agg) assert np.all(np.isnan(impact.eai_exp)) - assert impact.eai_exp.shape == (len(exp.gdf),) + assert impact.eai_exp.shape == (len(exposure_fixture.gdf),) class TestImpactMatrixCalc(unittest.TestCase): From c10a4b349f6bfaa8c07131f8863cfeb66e212503 Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 13:52:48 +0100 Subject: [PATCH 10/34] Fix error in test fixtures --- climada/engine/test/test_impact_calc.py | 30 +++++++++++++++---------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index 5957925e47..73651d219d 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -75,11 +75,15 @@ def hazard_fixture(exposure_fixture): lat=exposure_fixture.gdf.geometry.x, lon=exposure_fixture.gdf.geometry.y, ) - intensity = ( + intensity = sparse.csr_matrix( np.ones((n_events, exposure_fixture.gdf.shape[0])) * 50 ) # uniform intensity haz = Hazard() + haz.event_id = np.arange(n_events) + haz.event_name = haz.event_id haz.haz_type = "TC" + haz.date = haz.event_id + haz.frequency_unit = "m/s" haz.centroids = centroids haz.intensity = intensity haz.frequency = 1 / 10 * np.ones(n_events) # uniform frequency (10 n_events) @@ -696,13 +700,16 @@ class TestImpactCalcForecast: """Test impact calc for forecast hazard""" def test_impactForecast_type( + self, exposure_fixture, hazard_forecast_fixture, impact_func_set_fixture, impact_calc_fixture, ): """Test that ImpactForecast is returned correctly""" - + impact = ImpactCalc( + exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture + ).impact(assign_centroids=True, save_mat=True) # check that impact is indeed ImpactForecast assert isinstance(impact, ImpactForecast) np.testing.assert_array_equal( @@ -712,7 +719,7 @@ def test_impactForecast_type( np.testing.assert_array_equal(impact.member, hazard_forecast_fixture.member) def test_impact_forecast_empty_impmat_error( 
- hazard_forecast_fixture, exposure_fixture, impact_func_set_fixture + self, hazard_forecast_fixture, exposure_fixture, impact_func_set_fixture ): """Test that error is raised when trying to compute impact forecast without saving impact matrix @@ -720,21 +727,20 @@ def test_impact_forecast_empty_impmat_error( icalc = ImpactCalc( exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture ) - with self.assertRaises(ValueError) as cm: - icalc.impact(assign_centroids=False, save_mat=False) - no_impmat_exception = cm.exception - self.assertEqual( - no_impmat_exception.args[0], + no_impmat_exception = ( "Saving impact matrix is required when using HazardForecast." - "Please set save_mat=True.", + "Please set save_mat=True." ) + with pytest.raises(ValueError) as cm: + icalc.impact(assign_centroids=True, save_mat=False) + assert no_impmat_exception == str(cm.value) def test_impact_forecast_blocked_nonsense_attrs( - hazard_forecast_fixture, exposure_fixture, impact_func_set_fixture + self, hazard_forecast_fixture, exposure_fixture, impact_func_set_fixture ): """Test that nonsense attributes are blocked when computing impact forecast""" - lead_time = hazard_fixture.lead_time - member = hazard_fixture.member + lead_time = hazard_forecast_fixture.lead_time + member = hazard_forecast_fixture.member impact = ImpactCalc( exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture From d3a56429dca1c8eed6c4def5d9faef3b89fb8b3c Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 14:02:55 +0100 Subject: [PATCH 11/34] Returns nans for eai_exp and aai_agg when exposures is empty --- climada/engine/impact_calc.py | 8 ++++++-- climada/engine/test/test_impact_calc.py | 12 ++++++++++-- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/climada/engine/impact_calc.py b/climada/engine/impact_calc.py index 9e524a4d03..e8ca9c0c4c 100644 --- a/climada/engine/impact_calc.py +++ b/climada/engine/impact_calc.py @@ -273,9 +273,13 @@ def _return_empty(self, save_mat): Impact or ImpactForecast Empty impact object with correct array sizes. 
""" + if isinstance(self.hazard, HazardForecast): + eai_exp = np.nan * np.ones(self.n_exp_pnt) + aai_agg = np.nan + else: + eai_exp = np.zeros(self.n_exp_pnt) + aai_agg = 0.0 at_event = np.zeros(self.n_events) - eai_exp = np.zeros(self.n_exp_pnt) - aai_agg = 0.0 if save_mat: imp_mat = sparse.csr_matrix( (self.n_events, self.n_exp_pnt), dtype=np.float64 diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index 73651d219d..27dd18439d 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -51,8 +51,7 @@ @pytest.fixture(autouse=True) -def exposure_fixture(): - n_exp = 50 +def exposure_fixture(n_exp=50): lats = np.linspace(-10, 10, n_exp) lons = np.linspace(-10, 10, n_exp) data = gpd.GeoDataFrame( @@ -749,6 +748,15 @@ def test_impact_forecast_blocked_nonsense_attrs( assert np.all(np.isnan(impact.eai_exp)) assert impact.eai_exp.shape == (len(exposure_fixture.gdf),) + # test that aai_agg and eai_exp are also nan when 0-size exp + empty_exp = exposure_fixture(n_exp=0) + impact_empty = ImpactCalc( + exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture + ).impact(assign_centroids=True, save_mat=True) + assert np.isnan(impact_empty.aai_agg) + assert np.all(np.isnan(impact_empty.eai_exp)) + assert impact_empty.eai_exp.shape == (len(empty_exp.gdf),) + class TestImpactMatrixCalc(unittest.TestCase): """Verify the computation of the impact matrix""" From d43a46c0810fec0e19376d12d12952c5ee6d505d Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 14:34:54 +0100 Subject: [PATCH 12/34] add warning when at_event is used with forecast --- climada/engine/impact_forecast.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index d4afc551d4..d494882af5 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -88,3 +88,13 @@ def from_impact( imp_mat=impact.imp_mat, haz_type=impact.haz_type, ) + + @property + def at_event(self): + return self._at_event + + @at_event.setter + def at_event(self, value): + """Set the total exposure value close to a hazard""" + LOGGER.warning("at_event for forecasts is not yet implemented.") + self._at_event = value From e1975664348c95d9b87538671a43a8361c4fee9b Mon Sep 17 00:00:00 2001 From: Lukas Riedel <34276446+peanutfun@users.noreply.github.com> Date: Tue, 9 Dec 2025 14:51:23 +0100 Subject: [PATCH 13/34] Update ImpactCalc tests for forecasts --- climada/engine/test/test_impact_calc.py | 149 ++++++++++-------------- 1 file changed, 63 insertions(+), 86 deletions(-) diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index 27dd18439d..f7dd2ecb75 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -50,8 +50,9 @@ DATA_FOLDER.mkdir(exist_ok=True) -@pytest.fixture(autouse=True) -def exposure_fixture(n_exp=50): +@pytest.fixture(params=[50, 1, 0]) +def exposure(request): + n_exp = request.param lats = np.linspace(-10, 10, n_exp) lons = np.linspace(-10, 10, n_exp) data = gpd.GeoDataFrame( @@ -67,19 +68,19 @@ def exposure_fixture(n_exp=50): return exposures -@pytest.fixture(autouse=True) -def hazard_fixture(exposure_fixture): +@pytest.fixture +def hazard(exposure): n_events = 10 centroids = Centroids( - lat=exposure_fixture.gdf.geometry.x, - lon=exposure_fixture.gdf.geometry.y, + lat=exposure.gdf.geometry.x, + lon=exposure.gdf.geometry.y, ) intensity = sparse.csr_matrix( - 
np.ones((n_events, exposure_fixture.gdf.shape[0])) * 50 + np.ones((n_events, exposure.gdf.shape[0])) * 50 ) # uniform intensity haz = Hazard() haz.event_id = np.arange(n_events) - haz.event_name = haz.event_id + haz.event_name = haz.event_id.tolist() haz.haz_type = "TC" haz.date = haz.event_id haz.frequency_unit = "m/s" @@ -89,24 +90,28 @@ def hazard_fixture(exposure_fixture): return haz -@pytest.fixture(autouse=True) -def hazard_forecast_fixture(hazard_fixture): - n_events = hazard_fixture.size +@pytest.fixture +def hazard_forecast(hazard): + n_events = hazard.size lead_time = pd.timedelta_range("1h", periods=n_events).to_numpy() - member = np.arange(10) + member = np.arange(n_events) haz_fc = HazardForecast.from_hazard( - hazard=hazard_fixture, + hazard=hazard, lead_time=lead_time, member=member, ) return haz_fc -@pytest.fixture(autouse=True) -def impact_func_set_fixture(exposure_fixture, hazard_fixture): +@pytest.fixture +def impact_func_set(exposure, hazard): step_impf = ImpactFunc() - step_impf.id = exposure_fixture.data[f"impf_{hazard_fixture.haz_type}"].unique()[0] - step_impf.haz_type = hazard_fixture.haz_type + step_impf.id = 1 + try: + step_impf.id = exposure.data[f"impf_{hazard.haz_type}"].unique()[0] + except IndexError: + pass + step_impf.haz_type = hazard.haz_type step_impf.name = "fixture step function" step_impf.intensity_unit = "" step_impf.intensity = np.array([0, 0.495, 0.4955, 0.5, 1, 10]) @@ -115,18 +120,12 @@ def impact_func_set_fixture(exposure_fixture, hazard_fixture): return ImpactFuncSet([step_impf]) -@pytest.fixture(autouse=True) -def impact_calc_fixture(exposure_fixture, hazard_fixture, impact_func_set_fixture): - imp_mat = np.ones( - ( - len(hazard_fixture.event_id), - exposure_fixture.gdf.shape[0], - exposure_fixture.gdf.shape[0], - ) - ) - aai_agg = np.sum(exposure_fixture.gdf["value"]) * hazard_fixture.frequency[0] - eai_exp = np.ones(exposure_fixture.gdf.shape[0]) * hazard_fixture.frequency[0] - at_event = np.ones(hazard_fixture.size) * np.sum(exposure_fixture.gdf["value"]) +@pytest.fixture +def impact_calc(exposure, hazard): + imp_mat = np.ones((len(hazard.event_id), exposure.gdf.shape[0])) + aai_agg = np.sum(exposure.gdf["value"]) * hazard.frequency[0] + eai_exp = np.ones(exposure.gdf.shape[0]) * hazard.frequency[0] + at_event = np.ones(hazard.size) * np.sum(exposure.gdf["value"]) return { "imp_mat": imp_mat, "aai_agg": aai_agg, @@ -135,17 +134,18 @@ def impact_calc_fixture(exposure_fixture, hazard_fixture, impact_func_set_fixtur } -def check_impact(self, imp, haz, exp, aai_agg, eai_exp, at_event, imp_mat_array=None): +def check_impact(imp, haz, exp, aai_agg, eai_exp, at_event, imp_mat_array=None): """Test properties of impacts""" - self.assertEqual(len(haz.event_id), len(imp.at_event)) - self.assertIsInstance(imp, Impact) + # NOTE: Correctly compares NaNs! 
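+    # (np.testing.assert_allclose defaults to equal_nan=True, so the NaN aai_agg and
+    # eai_exp placeholders produced for forecasts compare equal in the checks below)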
+ assert len(haz.event_id) == len(imp.at_event) + assert isinstance(imp, Impact) np.testing.assert_allclose(imp.coord_exp[:, 0], exp.latitude) np.testing.assert_allclose(imp.coord_exp[:, 1], exp.longitude) - self.assertAlmostEqual(imp.aai_agg, aai_agg, 3) + np.testing.assert_allclose(imp.aai_agg, aai_agg, rtol=1e-3) np.testing.assert_allclose(imp.eai_exp, eai_exp, rtol=1e-5) np.testing.assert_allclose(imp.at_event, at_event, rtol=1e-5) if imp_mat_array is not None: - np.testing.assert_allclose(imp.imp_mat.toarray().ravel(), imp_mat_array.ravel()) + np.testing.assert_allclose(imp.imp_mat.todense(), imp_mat_array) class TestImpactCalc(unittest.TestCase): @@ -389,7 +389,7 @@ def test_calc_impact_RF_pass(self): ] ) # fmt: on - check_impact(self, impact, haz, exp, aai_agg, eai_exp, at_event, imp_mat_array) + check_impact(impact, haz, exp, aai_agg, eai_exp, at_event, imp_mat_array) def test_empty_impact(self): """Check that empty impact is returned if no centroids match the exposures""" @@ -400,11 +400,11 @@ def test_empty_impact(self): aai_agg = 0.0 eai_exp = np.zeros(len(exp.gdf)) at_event = np.zeros(HAZ.size) - check_impact(self, impact, HAZ, exp, aai_agg, eai_exp, at_event, None) + check_impact(impact, HAZ, exp, aai_agg, eai_exp, at_event, None) impact = icalc.impact(save_mat=True, assign_centroids=False) imp_mat_array = sparse.csr_matrix((HAZ.size, len(exp.gdf))).toarray() - check_impact(self, impact, HAZ, exp, aai_agg, eai_exp, at_event, imp_mat_array) + check_impact(impact, HAZ, exp, aai_agg, eai_exp, at_event, imp_mat_array) def test_single_event_impact(self): """Check impact for single event""" @@ -414,11 +414,11 @@ def test_single_event_impact(self): aai_agg = 0.0 eai_exp = np.zeros(len(ENT.exposures.gdf)) at_event = np.array([0]) - check_impact(self, impact, haz, ENT.exposures, aai_agg, eai_exp, at_event, None) + check_impact(impact, haz, ENT.exposures, aai_agg, eai_exp, at_event, None) impact = icalc.impact(save_mat=True, assign_centroids=False) imp_mat_array = sparse.csr_matrix((haz.size, len(ENT.exposures.gdf))).toarray() check_impact( - self, impact, haz, ENT.exposures, aai_agg, eai_exp, at_event, imp_mat_array + impact, haz, ENT.exposures, aai_agg, eai_exp, at_event, imp_mat_array ) def test_calc_impact_save_mat_pass(self): @@ -692,70 +692,47 @@ def test_single_exp_zero_mdr(self): imp = ImpactCalc(exp, impf_set, haz).impact( assign_centroids=False, save_mat=True ) - check_impact(self, imp, haz, exp, aai_agg, eai_exp, at_event, at_event) + check_impact(imp, haz, exp, aai_agg, eai_exp, at_event, at_event) class TestImpactCalcForecast: """Test impact calc for forecast hazard""" - def test_impactForecast_type( + @pytest.fixture + def impact_calc_forecast(self, impact_calc): + """Write NaNs to attributes that are not used""" + impact_calc["aai_agg"] = np.full_like(impact_calc["aai_agg"], np.nan) + impact_calc["eai_exp"] = np.full_like(impact_calc["eai_exp"], np.nan) + + def test_impact_forecast( self, - exposure_fixture, - hazard_forecast_fixture, - impact_func_set_fixture, - impact_calc_fixture, + exposure, + hazard_forecast, + impact_func_set, + impact_calc, + impact_calc_forecast, ): """Test that ImpactForecast is returned correctly""" - impact = ImpactCalc( - exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture - ).impact(assign_centroids=True, save_mat=True) + impact = ImpactCalc(exposure, impact_func_set, hazard_forecast).impact( + assign_centroids=True, save_mat=True + ) # check that impact is indeed ImpactForecast + impact_calc["imp_mat_array"] = 
impact_calc.pop("imp_mat") + check_impact(imp=impact, haz=hazard_forecast, exp=exposure, **impact_calc) assert isinstance(impact, ImpactForecast) - np.testing.assert_array_equal( - impact.lead_time, hazard_forecast_fixture.lead_time - ) - assert impact.lead_time.dtype == hazard_forecast_fixture.lead_time.dtype - np.testing.assert_array_equal(impact.member, hazard_forecast_fixture.member) + np.testing.assert_array_equal(impact.lead_time, hazard_forecast.lead_time) + assert impact.lead_time.dtype == hazard_forecast.lead_time.dtype + np.testing.assert_array_equal(impact.member, hazard_forecast.member) def test_impact_forecast_empty_impmat_error( - self, hazard_forecast_fixture, exposure_fixture, impact_func_set_fixture + self, hazard_forecast, exposure, impact_func_set ): """Test that error is raised when trying to compute impact forecast without saving impact matrix """ - icalc = ImpactCalc( - exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture - ) - no_impmat_exception = ( - "Saving impact matrix is required when using HazardForecast." - "Please set save_mat=True." - ) - with pytest.raises(ValueError) as cm: + icalc = ImpactCalc(exposure, impact_func_set, hazard_forecast) + with pytest.raises(ValueError, match="Saving impact matrix is required"): icalc.impact(assign_centroids=True, save_mat=False) - assert no_impmat_exception == str(cm.value) - - def test_impact_forecast_blocked_nonsense_attrs( - self, hazard_forecast_fixture, exposure_fixture, impact_func_set_fixture - ): - """Test that nonsense attributes are blocked when computing impact forecast""" - lead_time = hazard_forecast_fixture.lead_time - member = hazard_forecast_fixture.member - - impact = ImpactCalc( - exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture - ).impact(assign_centroids=True, save_mat=True) - assert np.isnan(impact.aai_agg) - assert np.all(np.isnan(impact.eai_exp)) - assert impact.eai_exp.shape == (len(exposure_fixture.gdf),) - - # test that aai_agg and eai_exp are also nan when 0-size exp - empty_exp = exposure_fixture(n_exp=0) - impact_empty = ImpactCalc( - exposure_fixture, impact_func_set_fixture, hazard_forecast_fixture - ).impact(assign_centroids=True, save_mat=True) - assert np.isnan(impact_empty.aai_agg) - assert np.all(np.isnan(impact_empty.eai_exp)) - assert impact_empty.eai_exp.shape == (len(empty_exp.gdf),) class TestImpactMatrixCalc(unittest.TestCase): From 554cfc8cb56b9397fd80fb4ee0a7456f2e731ec9 Mon Sep 17 00:00:00 2001 From: Lukas Riedel <34276446+peanutfun@users.noreply.github.com> Date: Tue, 9 Dec 2025 14:58:12 +0100 Subject: [PATCH 14/34] Review ImpactCalc forecast handling --- climada/engine/impact_calc.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/climada/engine/impact_calc.py b/climada/engine/impact_calc.py index e8ca9c0c4c..f58316bf56 100644 --- a/climada/engine/impact_calc.py +++ b/climada/engine/impact_calc.py @@ -233,11 +233,11 @@ def _return_impact(self, imp_mat_gen, save_mat): imp_mat, self.hazard.frequency ) if isinstance(self.hazard, HazardForecast): - eai_exp = np.nan * np.ones(eai_exp.shape, dtype=eai_exp.dtype) - aai_agg = np.nan * np.ones(aai_agg.shape, dtype=aai_agg.dtype) + eai_exp = np.full_like(eai_exp, np.nan, dtype=eai_exp.dtype) + aai_agg = np.full_like(aai_agg, np.nan, dtype=aai_agg.dtype) LOGGER.warning( "eai_exp and aai_agg are undefined with forecasts. " - "Setting them to empty arrays." + "Setting them to NaN arrays." 
) else: @@ -256,8 +256,7 @@ def _return_impact(self, imp_mat_gen, save_mat): return ImpactForecast.from_impact( impact, self.hazard.lead_time, self.hazard.member ) - else: - return impact + return impact def _return_empty(self, save_mat): """ @@ -273,13 +272,14 @@ def _return_empty(self, save_mat): Impact or ImpactForecast Empty impact object with correct array sizes. """ + at_event = np.zeros(self.n_events) if isinstance(self.hazard, HazardForecast): - eai_exp = np.nan * np.ones(self.n_exp_pnt) + eai_exp = np.full(self.n_exp_pnt, np.nan) aai_agg = np.nan else: eai_exp = np.zeros(self.n_exp_pnt) aai_agg = 0.0 - at_event = np.zeros(self.n_events) + if save_mat: imp_mat = sparse.csr_matrix( (self.n_events, self.n_exp_pnt), dtype=np.float64 @@ -287,10 +287,11 @@ def _return_empty(self, save_mat): else: if isinstance(self.hazard, HazardForecast): raise ValueError( - "Saving impact matrix is required when using HazardForecast." + "Saving impact matrix is required when using HazardForecast. " "Please set save_mat=True." ) imp_mat = None + impact = Impact.from_eih( self.exposures, self.hazard, at_event, eai_exp, aai_agg, imp_mat ) @@ -298,8 +299,7 @@ def _return_empty(self, save_mat): return ImpactForecast.from_impact( impact, self.hazard.lead_time, self.hazard.member ) - else: - return impact + return impact def minimal_exp_gdf( self, impf_col, assign_centroids, ignore_cover, ignore_deductible From 30ed14d41a832a1794ff763d175db394d0ba6b76 Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 15:00:35 +0100 Subject: [PATCH 15/34] Block local_exceedance_impact --- climada/engine/impact_forecast.py | 21 +++++++++++++++++++++ climada/engine/test/test_impact_forecast.py | 6 ++++++ 2 files changed, 27 insertions(+) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index d494882af5..e748840f21 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -98,3 +98,24 @@ def at_event(self, value): """Set the total exposure value close to a hazard""" LOGGER.warning("at_event for forecasts is not yet implemented.") self._at_event = value + + def local_exceedance_impact( + self, + return_periods=(25, 50, 100, 250), + method="interpolate", + min_impact=0, + log_frequency=True, + log_impact=True, + bin_decimals=None, + ): + """Compution of local exceedance impact for given return periods is not + implemented for ImpactForecast. See climada.engine.impact.Impact for details. 
+ Returns + ------- + NotImplementedError + """ + + LOGGER.error("local_exceedance_impact is not defined for ImpactForecast") + raise NotImplementedError( + "local_exceedance_impact is not defined for ImpactForecast" + ) diff --git a/climada/engine/test/test_impact_forecast.py b/climada/engine/test/test_impact_forecast.py index 0d421152c2..1657ff8a50 100644 --- a/climada/engine/test/test_impact_forecast.py +++ b/climada/engine/test/test_impact_forecast.py @@ -103,3 +103,9 @@ def test_impact_forecast_concat(impact_forecast, member): [impact_forecast, impact_forecast], reset_event_ids=True ) npt.assert_array_equal(impact_fc.member, np.concatenate([member, member])) + + +def test_impact_forecast_exceedance_freq_curve_error(impact_forecast): + """Check if ImpactForecast.exceedance_freq_curve raises NotImplementedError""" + with pytest.raises(NotImplementedError): + impact_forecast.local_exceedance_impact(np.array([10, 50, 100])) From f3ab44af752d43e1fb3df717e7cef3aa88ba7bad Mon Sep 17 00:00:00 2001 From: Lukas Riedel <34276446+peanutfun@users.noreply.github.com> Date: Tue, 9 Dec 2025 15:14:27 +0100 Subject: [PATCH 16/34] Fix bug in test --- climada/engine/test/test_impact_calc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/climada/engine/test/test_impact_calc.py b/climada/engine/test/test_impact_calc.py index f7dd2ecb75..dd69de7249 100644 --- a/climada/engine/test/test_impact_calc.py +++ b/climada/engine/test/test_impact_calc.py @@ -692,7 +692,7 @@ def test_single_exp_zero_mdr(self): imp = ImpactCalc(exp, impf_set, haz).impact( assign_centroids=False, save_mat=True ) - check_impact(imp, haz, exp, aai_agg, eai_exp, at_event, at_event) + check_impact(imp, haz, exp, aai_agg, eai_exp, at_event, np.array([at_event]).T) class TestImpactCalcForecast: From 9e4f2bb6b02645b747cf575de3d5186c58064b34 Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 15:47:15 +0100 Subject: [PATCH 17/34] Block return_period and exceedance_freq_curve --- climada/engine/impact_forecast.py | 32 +++++++++++++++++++++ climada/engine/test/test_impact_forecast.py | 8 +++++- 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index e748840f21..3d4dd337b8 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -119,3 +119,35 @@ def local_exceedance_impact( raise NotImplementedError( "local_exceedance_impact is not defined for ImpactForecast" ) + + def local_return_period( + self, + threshold_impact=(1000.0, 10000.0), + method="interpolate", + min_impact=0, + log_frequency=True, + log_impact=True, + bin_decimals=None, + ): + """Compution of local return period for given impact thresholds is not + implemented for ImpactForecast. See climada.engine.impact.Impact for details. + Returns + ------- + NotImplementedError + """ + + LOGGER.error("local_return_period is not defined for ImpactForecast") + raise NotImplementedError( + "local_return_period is not defined for ImpactForecast" + ) + + def calc_freq_curve(self, return_per=None): + """Computation of the impact exceedance frequency curve is not + implemented for ImpactForecast. See climada.engine.impact.Impact for details. 
+ Returns + ------- + NotImplementedError + """ + + LOGGER.error("calc_freq_curve is not defined for ImpactForecast") + raise NotImplementedError("calc_freq_curve is not defined for ImpactForecast") diff --git a/climada/engine/test/test_impact_forecast.py b/climada/engine/test/test_impact_forecast.py index 1657ff8a50..5d3902b5ae 100644 --- a/climada/engine/test/test_impact_forecast.py +++ b/climada/engine/test/test_impact_forecast.py @@ -105,7 +105,13 @@ def test_impact_forecast_concat(impact_forecast, member): npt.assert_array_equal(impact_fc.member, np.concatenate([member, member])) -def test_impact_forecast_exceedance_freq_curve_error(impact_forecast): +def test_impact_forecast_blocked_methods(impact_forecast): """Check if ImpactForecast.exceedance_freq_curve raises NotImplementedError""" with pytest.raises(NotImplementedError): impact_forecast.local_exceedance_impact(np.array([10, 50, 100])) + + with pytest.raises(NotImplementedError): + impact_forecast.local_return_period(np.array([10, 50, 100])) + + with pytest.raises(NotImplementedError): + impact_forecast.calc_freq_curve(np.array([10, 50, 100])) From 727357eda52390f74e5c46ba84054bab49204c7d Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 15:49:38 +0100 Subject: [PATCH 18/34] Log warning for at_event getter --- climada/engine/impact_forecast.py | 1 + 1 file changed, 1 insertion(+) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index 3d4dd337b8..ead93fe38f 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -91,6 +91,7 @@ def from_impact( @property def at_event(self): + LOGGER.warning("at_event for forecasts is not yet implemented.") return self._at_event @at_event.setter From 0bc584653b7fe2b93708793712b67923b9962160 Mon Sep 17 00:00:00 2001 From: luseverin Date: Tue, 9 Dec 2025 17:13:13 +0100 Subject: [PATCH 19/34] Add mean max min methods to impact forecast and tests --- climada/engine/impact_forecast.py | 135 ++++++++++++++++++++ climada/engine/test/test_impact_forecast.py | 36 ++++++ 2 files changed, 171 insertions(+) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index ead93fe38f..444e41b5f4 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -22,6 +22,7 @@ import logging import numpy as np +import scipy.sparse as sparse from ..util import log_level from ..util.forecast import Forecast @@ -152,3 +153,137 @@ def calc_freq_curve(self, return_per=None): LOGGER.error("calc_freq_curve is not defined for ImpactForecast") raise NotImplementedError("calc_freq_curve is not defined for ImpactForecast") + + def _reduce_attrs(self, reduce_method: str): + """ + Reduce the attributes of an ImpactForecast to a single value. + + Attributes are modified as follows: + - event_id: set to [0] + - event_name: set to [reduce_method] + - date: set to the minimum value + - frequency: set to 0 + + Parameters + ---------- + reduce_method : str + The reduction method used to reduce the attributes. + """ + red_event_id = np.asarray([0]) + red_event_name = np.asarray([reduce_method]) + red_date = np.array([self.date.min()]) + red_frequency = np.array([0]) + return red_event_id, red_event_name, red_date, red_frequency + + def min(self): + """ + Reduce the impact matrix and at_event of an ImpactForecast to the minimum + value. 
+ + Parameters + ---------- + None + + Returns + ------- + None + """ + red_imp_mat = sparse.csr_matrix(self.imp_mat.min(axis=0)) + red_at_event = np.array([red_imp_mat.sum()]) + red_event_id, red_event_name, red_date, red_frequency = self._reduce_attrs( + "min" + ) + return ImpactForecast( + lead_time=self.lead_time, + member=self.member, + event_id=red_event_id, + event_name=red_event_name, + date=red_date, + frequency=red_frequency, + frequency_unit=self.frequency_unit, + coord_exp=self.coord_exp, + crs=self.crs, + eai_exp=self.eai_exp, + at_event=red_at_event, + tot_value=self.tot_value, + aai_agg=self.aai_agg, + unit=self.unit, + imp_mat=red_imp_mat, + haz_type=self.haz_type, + ) + + def max(self): + """ + Reduce the impact matrix and at_event of an ImpactForecast to the maximum + value. + + Parameters + ---------- + None + + Returns + ------- + None + """ + red_imp_mat = sparse.csr_matrix(self.imp_mat.max(axis=0)) + red_at_event = np.array([red_imp_mat.sum()]) + red_event_id, red_event_name, red_date, red_frequency = self._reduce_attrs( + "max" + ) + return ImpactForecast( + lead_time=self.lead_time, + member=self.member, + event_id=red_event_id, + event_name=red_event_name, + date=red_date, + frequency=red_frequency, + frequency_unit=self.frequency_unit, + coord_exp=self.coord_exp, + crs=self.crs, + eai_exp=self.eai_exp, + at_event=red_at_event, + tot_value=self.tot_value, + aai_agg=self.aai_agg, + unit=self.unit, + imp_mat=red_imp_mat, + haz_type=self.haz_type, + ) + + def mean(self): + """ + Reduce the impact matrix and at_event of an ImpactForecast to the mean value. + + The mean value is computed by taking the mean of the impact matrix along the + exposure points axis (axis=1) and then taking the mean of the resulting array. + + Parameters + ---------- + None + + Returns + ------- + None + """ + red_imp_mat = sparse.csr_matrix(self.imp_mat.mean(axis=0)) + red_at_event = np.array([red_imp_mat.sum()]) + red_event_id, red_event_name, red_date, red_frequency = self._reduce_attrs( + "mean" + ) + return ImpactForecast( + lead_time=self.lead_time, + member=self.member, + event_id=red_event_id, + event_name=red_event_name, + date=red_date, + frequency=red_frequency, + frequency_unit=self.frequency_unit, + coord_exp=self.coord_exp, + crs=self.crs, + eai_exp=self.eai_exp, + at_event=red_at_event, + tot_value=self.tot_value, + aai_agg=self.aai_agg, + unit=self.unit, + imp_mat=red_imp_mat, + haz_type=self.haz_type, + ) diff --git a/climada/engine/test/test_impact_forecast.py b/climada/engine/test/test_impact_forecast.py index 5d3902b5ae..b4df0bcff9 100644 --- a/climada/engine/test/test_impact_forecast.py +++ b/climada/engine/test/test_impact_forecast.py @@ -115,3 +115,39 @@ def test_impact_forecast_blocked_methods(impact_forecast): with pytest.raises(NotImplementedError): impact_forecast.calc_freq_curve(np.array([10, 50, 100])) + + +def test_impact_forecast_mean_min_max(impact_forecast): + """Check mean, min, and max methods for ImpactForecast""" + imp_fcst_mean = impact_forecast.mean() + imp_fcst_min = impact_forecast.min() + imp_fcst_max = impact_forecast.max() + # sparse.csr_matrix( + # np.array([[0, 0], [1, 1], [2, 2], [3, 3], [30, 30], [31, 31]]) + + # assert imp_mat + npt.assert_array_equal( + imp_fcst_mean.imp_mat.todense(), impact_forecast.imp_mat.todense().mean(axis=0) + ) + npt.assert_array_equal(imp_fcst_min.imp_mat.todense(), np.array([[0, 0]])) + npt.assert_array_equal(imp_fcst_max.imp_mat.todense(), np.array([[31, 31]])) + # assert at_event + npt.assert_array_equal( + 
imp_fcst_mean.at_event, impact_forecast.at_event.mean() + ) # 134/6 + npt.assert_array_equal(imp_fcst_min.at_event, impact_forecast.at_event.min()) + npt.assert_array_equal(imp_fcst_max.at_event, impact_forecast.at_event.max()) + + # check that attributes where reduced correctly + assert imp_fcst_mean.event_name[0] == "mean" + assert imp_fcst_min.event_name[0] == "min" + assert imp_fcst_max.event_name[0] == "max" + assert imp_fcst_mean.event_id[0] == 0 + assert imp_fcst_min.event_id[0] == 0 + assert imp_fcst_max.event_id[0] == 0 + assert imp_fcst_mean.frequency == 0 + assert imp_fcst_min.frequency == 0 + assert imp_fcst_max.frequency == 0 + assert imp_fcst_mean.date == impact_forecast.date.min() + assert imp_fcst_min.date == impact_forecast.date.min() + assert imp_fcst_max.date == impact_forecast.date.min() From 0aad2a5064f1ad8d902978cb6785b632c329d244 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 10:30:18 +0100 Subject: [PATCH 20/34] Reduce lead time and member and update test --- climada/engine/impact_forecast.py | 65 ++++++++++----------- climada/engine/test/test_impact_forecast.py | 12 +++- 2 files changed, 41 insertions(+), 36 deletions(-) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index 803fc88874..41e1038541 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -194,18 +194,23 @@ def _reduce_attrs(self, reduce_method: str): - event_id: set to [0] - event_name: set to [reduce_method] - date: set to the minimum value - - frequency: set to 0 + - frequency: set to 1 Parameters ---------- reduce_method : str The reduction method used to reduce the attributes. """ - red_event_id = np.asarray([0]) - red_event_name = np.asarray([reduce_method]) - red_date = np.array([self.date.min()]) - red_frequency = np.array([0]) - return red_event_id, red_event_name, red_date, red_frequency + reduced_attrs = { + "lead_time": np.array([np.timedelta64("NaT")]), + "member": np.array([-1]), + "event_id": np.array([0]), + "event_name": np.array([reduce_method]), + "date": np.array([self.date.min()]), + "frequency": np.array([1]), + } + + return reduced_attrs def min(self): """ @@ -222,16 +227,14 @@ def min(self): """ red_imp_mat = sparse.csr_matrix(self.imp_mat.min(axis=0)) red_at_event = np.array([red_imp_mat.sum()]) - red_event_id, red_event_name, red_date, red_frequency = self._reduce_attrs( - "min" - ) + reduced_attrs = self._reduce_attrs("min") return ImpactForecast( - lead_time=self.lead_time, - member=self.member, - event_id=red_event_id, - event_name=red_event_name, - date=red_date, - frequency=red_frequency, + lead_time=reduced_attrs["lead_time"], + member=reduced_attrs["member"], + event_id=reduced_attrs["event_id"], + event_name=reduced_attrs["event_name"], + date=reduced_attrs["date"], + frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, coord_exp=self.coord_exp, crs=self.crs, @@ -259,16 +262,14 @@ def max(self): """ red_imp_mat = sparse.csr_matrix(self.imp_mat.max(axis=0)) red_at_event = np.array([red_imp_mat.sum()]) - red_event_id, red_event_name, red_date, red_frequency = self._reduce_attrs( - "max" - ) + reduced_attrs = self._reduce_attrs("max") return ImpactForecast( - lead_time=self.lead_time, - member=self.member, - event_id=red_event_id, - event_name=red_event_name, - date=red_date, - frequency=red_frequency, + lead_time=reduced_attrs["lead_time"], + member=reduced_attrs["member"], + event_id=reduced_attrs["event_id"], + event_name=reduced_attrs["event_name"], + 
date=reduced_attrs["date"], + frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, coord_exp=self.coord_exp, crs=self.crs, @@ -298,16 +299,14 @@ def mean(self): """ red_imp_mat = sparse.csr_matrix(self.imp_mat.mean(axis=0)) red_at_event = np.array([red_imp_mat.sum()]) - red_event_id, red_event_name, red_date, red_frequency = self._reduce_attrs( - "mean" - ) + reduced_attrs = self._reduce_attrs("mean") return ImpactForecast( - lead_time=self.lead_time, - member=self.member, - event_id=red_event_id, - event_name=red_event_name, - date=red_date, - frequency=red_frequency, + lead_time=reduced_attrs["lead_time"], + member=reduced_attrs["member"], + event_id=reduced_attrs["event_id"], + event_name=reduced_attrs["event_name"], + date=reduced_attrs["date"], + frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, coord_exp=self.coord_exp, crs=self.crs, diff --git a/climada/engine/test/test_impact_forecast.py b/climada/engine/test/test_impact_forecast.py index 23e38b1240..08067d06f0 100644 --- a/climada/engine/test/test_impact_forecast.py +++ b/climada/engine/test/test_impact_forecast.py @@ -189,15 +189,21 @@ def test_impact_forecast_mean_min_max(impact_forecast): npt.assert_array_equal(imp_fcst_max.at_event, impact_forecast.at_event.max()) # check that attributes where reduced correctly + assert np.isnat(imp_fcst_mean.lead_time[0]) + assert np.isnat(imp_fcst_min.lead_time[0]) + assert np.isnat(imp_fcst_max.lead_time[0]) + assert imp_fcst_mean.member[0] == -1 + assert imp_fcst_min.member[0] == -1 + assert imp_fcst_max.member[0] == -1 assert imp_fcst_mean.event_name[0] == "mean" assert imp_fcst_min.event_name[0] == "min" assert imp_fcst_max.event_name[0] == "max" assert imp_fcst_mean.event_id[0] == 0 assert imp_fcst_min.event_id[0] == 0 assert imp_fcst_max.event_id[0] == 0 - assert imp_fcst_mean.frequency == 0 - assert imp_fcst_min.frequency == 0 - assert imp_fcst_max.frequency == 0 + assert imp_fcst_mean.frequency == 1 + assert imp_fcst_min.frequency == 1 + assert imp_fcst_max.frequency == 1 assert imp_fcst_mean.date == impact_forecast.date.min() assert imp_fcst_min.date == impact_forecast.date.min() assert imp_fcst_max.date == impact_forecast.date.min() From 102f9bec088bfe1bbcee0596fd019ed604f9e0f3 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 10:37:03 +0100 Subject: [PATCH 21/34] Update docstrings --- climada/engine/impact_forecast.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index 41e1038541..ff56e0b651 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -191,8 +191,10 @@ def _reduce_attrs(self, reduce_method: str): Reduce the attributes of an ImpactForecast to a single value. Attributes are modified as follows: - - event_id: set to [0] - - event_name: set to [reduce_method] + - lead_time: set to NaT + - member: set to -1 + - event_id: set to 0 + - event_name: set to reduce_method - date: set to the minimum value - frequency: set to 1 @@ -223,7 +225,8 @@ def min(self): Returns ------- - None + ImpactForecast + An ImpactForecast object with the min impact matrix and at_event. """ red_imp_mat = sparse.csr_matrix(self.imp_mat.min(axis=0)) red_at_event = np.array([red_imp_mat.sum()]) @@ -258,7 +261,8 @@ def max(self): Returns ------- - None + ImpactForecast + An ImpactForecast object with the max impact matrix and at_event. 
""" red_imp_mat = sparse.csr_matrix(self.imp_mat.max(axis=0)) red_at_event = np.array([red_imp_mat.sum()]) @@ -295,7 +299,8 @@ def mean(self): Returns ------- - None + ImpactForecast + An ImpactForecast object with the mean impact matrix and at_event. """ red_imp_mat = sparse.csr_matrix(self.imp_mat.mean(axis=0)) red_at_event = np.array([red_imp_mat.sum()]) From dfce16fc06cefb2dd44d9ea6a6da8bf7b0640f00 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 10:40:01 +0100 Subject: [PATCH 22/34] Remove useless comments --- climada/engine/test/test_impact_forecast.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/climada/engine/test/test_impact_forecast.py b/climada/engine/test/test_impact_forecast.py index 08067d06f0..5809edb089 100644 --- a/climada/engine/test/test_impact_forecast.py +++ b/climada/engine/test/test_impact_forecast.py @@ -172,8 +172,6 @@ def test_impact_forecast_mean_min_max(impact_forecast): imp_fcst_mean = impact_forecast.mean() imp_fcst_min = impact_forecast.min() imp_fcst_max = impact_forecast.max() - # sparse.csr_matrix( - # np.array([[0, 0], [1, 1], [2, 2], [3, 3], [30, 30], [31, 31]]) # assert imp_mat npt.assert_array_equal( From 0b6e1e46c42b9af409336c713ed5c0422e399ae9 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 10:59:09 +0100 Subject: [PATCH 23/34] Add min max mean for hazard forecast --- climada/hazard/forecast.py | 133 +++++++++++++++++++++++++++++++++++++ 1 file changed, 133 insertions(+) diff --git a/climada/hazard/forecast.py b/climada/hazard/forecast.py index 5130e66af1..2f557501c1 100644 --- a/climada/hazard/forecast.py +++ b/climada/hazard/forecast.py @@ -104,3 +104,136 @@ def _check_sizes(self): num_entries = len(self.event_id) size(exp_len=num_entries, var=self.member, var_name="Forecast.member") size(exp_len=num_entries, var=self.lead_time, var_name="Forecast.lead_time") + + def _reduce_attrs(self, reduce_method: str): + """ + Reduce the attributes of a HazardForecast to a single value. + + Attributes are modified as follows: + - lead_time: set to NaT + - member: set to -1 + - event_id: set to 0 + - event_name: set to reduce_method + - date: set to the minimum value + - frequency: set to 1 + + Parameters + ---------- + reduce_method : str + The reduction method used to reduce the attributes. + """ + reduced_attrs = { + "lead_time": np.array([np.timedelta64("NaT")]), + "member": np.array([-1]), + "event_id": np.array([0]), + "event_name": np.array([reduce_method]), + "date": np.array([self.date.min()]), + "frequency": np.array([1]), + "orig": np.array([True]), + } + + return reduced_attrs + + def min(self): + """ + Reduce the impact matrix and at_event of a HazardForecast to the minimum + value. + + Parameters + ---------- + None + + Returns + ------- + HazardForecast + A HazardForecast object with the min intensity and fraction. 
+ """ + red_intensity = sparse.csr_matrix(self.intensity.min(axis=0)) + red_fraction = sparse.csr_matrix(self.fraction.sum()) + reduced_attrs = self._reduce_attrs("min") + return HazardForecast( + lead_time=reduced_attrs["lead_time"], + member=reduced_attrs["member"], + haz_type=self.haz_type, + pool=self.pool, + units=self.units, + centroids=self.centroids, + event_id=reduced_attrs["event_id"], + frequency=reduced_attrs["frequency"], + frequency_unit=self.frequency_unit, + event_name=reduced_attrs["event_name"], + date=reduced_attrs["date"], + orig=hazard.orig, + intensity=red_intensity, + fraction=red_fraction, + ) + + def max(self): + """ + Reduce the impact matrix and at_event of a HazardForecast to the maximum + value. + + Parameters + ---------- + None + + Returns + ------- + HazardForecast + A HazardForecast object with the min intensity and fraction. + """ + red_intensity = sparse.csr_matrix(self.intensity.min(axis=0)) + red_fraction = sparse.csr_matrix(self.fraction.sum()) + reduced_attrs = self._reduce_attrs("max") + return HazardForecast( + lead_time=reduced_attrs["lead_time"], + member=reduced_attrs["member"], + haz_type=self.haz_type, + pool=self.pool, + units=self.units, + centroids=self.centroids, + event_id=reduced_attrs["event_id"], + frequency=reduced_attrs["frequency"], + frequency_unit=self.frequency_unit, + event_name=reduced_attrs["event_name"], + date=reduced_attrs["date"], + orig=hazard.orig, + intensity=red_intensity, + fraction=red_fraction, + ) + + def mean(self): + """ + Reduce the impact matrix and at_event of a HazardForecast to the mean value. + + The mean value is computed by taking the mean of the impact matrix along the + exposure points axis (axis=1) and then taking the mean of the resulting array. + + Parameters + ---------- + None + + Returns + ------- + HazardForecast + A HazardForecast object with the min intensity and fraction. + """ + red_intensity = sparse.csr_matrix(self.intensity.min(axis=0)) + red_fraction = sparse.csr_matrix(self.fraction.sum()) + reduced_attrs = self._reduce_attrs("mean") + return HazardForecast( + lead_time=reduced_attrs["lead_time"], + member=reduced_attrs["member"], + haz_type=self.haz_type, + pool=self.pool, + units=self.units, + centroids=self.centroids, + event_id=reduced_attrs["event_id"], + frequency=reduced_attrs["frequency"], + frequency_unit=self.frequency_unit, + event_name=reduced_attrs["event_name"], + date=reduced_attrs["date"], + orig=hazard.orig, + intensity=red_intensity, + fraction=red_fraction, + ) From 3fc25af3f59b8234f5c5f200734bf1e7f3c51135 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 11:23:22 +0100 Subject: [PATCH 24/34] Correct some mistakes in mean min max on hazard forecast --- climada/hazard/forecast.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/climada/hazard/forecast.py b/climada/hazard/forecast.py index 2f557501c1..ef60ab1d84 100644 --- a/climada/hazard/forecast.py +++ b/climada/hazard/forecast.py @@ -22,6 +22,7 @@ import logging import numpy as np +import scipy.sparse as sparse from ..util.checker import size from ..util.forecast import Forecast @@ -149,7 +150,7 @@ def min(self): A HazardForecast object with the min intensity and fraction. 
""" red_intensity = sparse.csr_matrix(self.intensity.min(axis=0)) - red_fraction = sparse.csr_matrix(self.fraction.sum()) + red_fraction = sparse.csr_matrix(self.fraction.min(axis=0)) reduced_attrs = self._reduce_attrs("min") return HazardForecast( lead_time=reduced_attrs["lead_time"], @@ -163,7 +164,7 @@ def min(self): frequency_unit=self.frequency_unit, event_name=reduced_attrs["event_name"], date=reduced_attrs["date"], - orig=hazard.orig, + orig=reduced_attrs["orig"], intensity=red_intensity, fraction=red_fraction, ) @@ -182,8 +183,8 @@ def max(self): HazardForecast A HazardForecast object with the min intensity and fraction. """ - red_intensity = sparse.csr_matrix(self.intensity.min(axis=0)) - red_fraction = sparse.csr_matrix(self.fraction.sum()) + red_intensity = sparse.csr_matrix(self.intensity.max(axis=0)) + red_fraction = sparse.csr_matrix(self.fraction.max(axis=0)) reduced_attrs = self._reduce_attrs("max") return HazardForecast( lead_time=reduced_attrs["lead_time"], @@ -197,7 +198,7 @@ def max(self): frequency_unit=self.frequency_unit, event_name=reduced_attrs["event_name"], date=reduced_attrs["date"], - orig=hazard.orig, + orig=reduced_attrs["orig"], intensity=red_intensity, fraction=red_fraction, ) @@ -218,8 +219,8 @@ def mean(self): HazardForecast A HazardForecast object with the min intensity and fraction. """ - red_intensity = sparse.csr_matrix(self.intensity.min(axis=0)) - red_fraction = sparse.csr_matrix(self.fraction.sum()) + red_intensity = sparse.csr_matrix(self.intensity.mean(axis=0)) + red_fraction = sparse.csr_matrix(self.fraction.mean(axis=0)) reduced_attrs = self._reduce_attrs("mean") return HazardForecast( lead_time=reduced_attrs["lead_time"], @@ -233,7 +234,7 @@ def mean(self): frequency_unit=self.frequency_unit, event_name=reduced_attrs["event_name"], date=reduced_attrs["date"], - orig=hazard.orig, + orig=reduced_attrs["orig"], intensity=red_intensity, fraction=red_fraction, ) From a4fe3b0f81261b0085648a328dbd4c8eeb4c9ce6 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 11:23:48 +0100 Subject: [PATCH 25/34] Add tests for hazard forecast mean min max --- climada/hazard/test/test_forecast.py | 51 ++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/climada/hazard/test/test_forecast.py b/climada/hazard/test/test_forecast.py index b102ee2d17..3c0e7ac206 100644 --- a/climada/hazard/test/test_forecast.py +++ b/climada/hazard/test/test_forecast.py @@ -164,3 +164,54 @@ def test_write_read_hazard_forecast(haz_fc, tmp_path): else: # npt.assert_array_equal also works for comparing int, float or list npt.assert_array_equal(haz_fc.__dict__[key], haz_fc_read.__dict__[key]) + + +def test_hazard_forecast_mean_min_max(haz_fc): + """Check mean, min, and max methods for ImpactForecast""" + haz_fcst_mean = haz_fc.mean() + haz_fcst_min = haz_fc.min() + haz_fcst_max = haz_fc.max() + + # assert intensity + npt.assert_array_equal( + haz_fcst_mean.intensity.todense(), haz_fc.intensity.todense().mean(axis=0) + ) + npt.assert_array_equal( + haz_fcst_min.intensity.todense(), haz_fc.intensity.todense().min(axis=0) + ) + npt.assert_array_equal( + haz_fcst_max.intensity.todense(), haz_fc.intensity.todense().max(axis=0) + ) + # assert fraction + npt.assert_array_equal( + haz_fcst_mean.fraction.todense(), haz_fc.fraction.todense().mean(axis=0) + ) + npt.assert_array_equal( + haz_fcst_min.fraction.todense(), haz_fc.fraction.todense().min(axis=0) + ) + npt.assert_array_equal( + haz_fcst_max.fraction.todense(), haz_fc.fraction.todense().max(axis=0) + ) + + # 
check that attributes where reduced correctly + assert np.isnat(haz_fcst_mean.lead_time[0]) + assert np.isnat(haz_fcst_min.lead_time[0]) + assert np.isnat(haz_fcst_max.lead_time[0]) + assert haz_fcst_mean.member[0] == -1 + assert haz_fcst_min.member[0] == -1 + assert haz_fcst_max.member[0] == -1 + assert haz_fcst_mean.event_name[0] == "mean" + assert haz_fcst_min.event_name[0] == "min" + assert haz_fcst_max.event_name[0] == "max" + assert haz_fcst_mean.event_id[0] == 0 + assert haz_fcst_min.event_id[0] == 0 + assert haz_fcst_max.event_id[0] == 0 + assert haz_fcst_mean.frequency == 1 + assert haz_fcst_min.frequency == 1 + assert haz_fcst_max.frequency == 1 + assert haz_fcst_mean.date == haz_fc.date.min() + assert haz_fcst_min.date == haz_fc.date.min() + assert haz_fcst_max.date == haz_fc.date.min() + assert np.all(haz_fcst_mean.orig) + assert np.all(haz_fcst_min.orig) + assert np.all(haz_fcst_max.orig) From e099868e67469327e393689d277df7425b28a5b5 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 11:55:13 +0100 Subject: [PATCH 26/34] Set reduced date to 0 and cosmetic changes --- climada/hazard/forecast.py | 23 ++++++++++------------- climada/hazard/test/test_forecast.py | 6 +++--- 2 files changed, 13 insertions(+), 16 deletions(-) diff --git a/climada/hazard/forecast.py b/climada/hazard/forecast.py index ef60ab1d84..29e2fe9d0c 100644 --- a/climada/hazard/forecast.py +++ b/climada/hazard/forecast.py @@ -106,7 +106,7 @@ def _check_sizes(self): size(exp_len=num_entries, var=self.member, var_name="Forecast.member") size(exp_len=num_entries, var=self.lead_time, var_name="Forecast.lead_time") - def _reduce_attrs(self, reduce_method: str): + def _reduce_attrs(self, event_name: str): """ Reduce the attributes of a HazardForecast to a single value. @@ -114,21 +114,21 @@ def _reduce_attrs(self, reduce_method: str): - lead_time: set to NaT - member: set to -1 - event_id: set to 0 - - event_name: set to reduce_method - - date: set to the minimum value + - event_name: set to the name of the reduction method (default) + - date: set to 0 - frequency: set to 1 Parameters ---------- - reduce_method : str - The reduction method used to reduce the attributes. + event_name : str + The event_name given to the reduced data. """ reduced_attrs = { "lead_time": np.array([np.timedelta64("NaT")]), "member": np.array([-1]), "event_id": np.array([0]), - "event_name": np.array([reduce_method]), - "date": np.array([self.date.min()]), + "event_name": np.array([event_name]), + "date": np.array([0]), "frequency": np.array([1]), "orig": np.array([True]), } @@ -137,7 +137,7 @@ def _reduce_attrs(self, reduce_method: str): def min(self): """ - Reduce the impact matrix and at_event of a HazardForecast to the minimum + Reduce the intensity and fraction of a HazardForecast to the minimum value. Parameters @@ -171,7 +171,7 @@ def min(self): def max(self): """ - Reduce the impact matrix and at_event of a HazardForecast to the maximum + Reduce the intensity and fraction of a HazardForecast to the maximum value. Parameters @@ -205,10 +205,7 @@ def max(self): def mean(self): """ - Reduce the impact matrix and at_event of a HazardForecast to the mean value. - - The mean value is computed by taking the mean of the impact matrix along the - exposure points axis (axis=1) and then taking the mean of the resulting array. + Reduce the intensity and fraction of a HazardForecast to the mean value. 
Parameters ---------- diff --git a/climada/hazard/test/test_forecast.py b/climada/hazard/test/test_forecast.py index 3c0e7ac206..0366043af7 100644 --- a/climada/hazard/test/test_forecast.py +++ b/climada/hazard/test/test_forecast.py @@ -209,9 +209,9 @@ def test_hazard_forecast_mean_min_max(haz_fc): assert haz_fcst_mean.frequency == 1 assert haz_fcst_min.frequency == 1 assert haz_fcst_max.frequency == 1 - assert haz_fcst_mean.date == haz_fc.date.min() - assert haz_fcst_min.date == haz_fc.date.min() - assert haz_fcst_max.date == haz_fc.date.min() + assert haz_fcst_mean.date == 0 + assert haz_fcst_min.date == 0 + assert haz_fcst_max.date == 0 assert np.all(haz_fcst_mean.orig) assert np.all(haz_fcst_min.orig) assert np.all(haz_fcst_max.orig) From 23660de455a9ae3d5d9c4d48f855e0885fc30d61 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 11:58:14 +0100 Subject: [PATCH 27/34] Set reduced date to 0 in impact forecast --- climada/engine/impact_forecast.py | 17 +++++++---------- climada/engine/test/test_impact_forecast.py | 6 +++--- 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index ff56e0b651..4812df6842 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -186,7 +186,7 @@ def _check_sizes(self): size(exp_len=num_entries, var=self.member, var_name="Forecast.member") size(exp_len=num_entries, var=self.lead_time, var_name="Forecast.lead_time") - def _reduce_attrs(self, reduce_method: str): + def _reduce_attrs(self, event_name: str): """ Reduce the attributes of an ImpactForecast to a single value. @@ -194,21 +194,21 @@ def _reduce_attrs(self, reduce_method: str): - lead_time: set to NaT - member: set to -1 - event_id: set to 0 - - event_name: set to reduce_method - - date: set to the minimum value + - event_name: set to the name of the reduction method (default) + - date: set to 0 - frequency: set to 1 Parameters ---------- - reduce_method : str - The reduction method used to reduce the attributes. + event_name : str + The event name given to the reduced data. """ reduced_attrs = { "lead_time": np.array([np.timedelta64("NaT")]), "member": np.array([-1]), "event_id": np.array([0]), - "event_name": np.array([reduce_method]), - "date": np.array([self.date.min()]), + "event_name": np.array([event_name]), + "date": np.array([0]), "frequency": np.array([1]), } @@ -290,9 +290,6 @@ def mean(self): """ Reduce the impact matrix and at_event of an ImpactForecast to the mean value. - The mean value is computed by taking the mean of the impact matrix along the - exposure points axis (axis=1) and then taking the mean of the resulting array. 
- Parameters ---------- None diff --git a/climada/engine/test/test_impact_forecast.py b/climada/engine/test/test_impact_forecast.py index 5809edb089..e7d8e32f77 100644 --- a/climada/engine/test/test_impact_forecast.py +++ b/climada/engine/test/test_impact_forecast.py @@ -202,6 +202,6 @@ def test_impact_forecast_mean_min_max(impact_forecast): assert imp_fcst_mean.frequency == 1 assert imp_fcst_min.frequency == 1 assert imp_fcst_max.frequency == 1 - assert imp_fcst_mean.date == impact_forecast.date.min() - assert imp_fcst_min.date == impact_forecast.date.min() - assert imp_fcst_max.date == impact_forecast.date.min() + assert imp_fcst_mean.date == 0 + assert imp_fcst_min.date == 0 + assert imp_fcst_max.date == 0 From 64149dff1b37e861e8df88da23d9bcb02c7bdae1 Mon Sep 17 00:00:00 2001 From: Lukas Riedel <34276446+peanutfun@users.noreply.github.com> Date: Wed, 10 Dec 2025 13:53:37 +0100 Subject: [PATCH 28/34] Simplify code and tests --- climada/engine/impact_forecast.py | 28 ++-------- climada/engine/test/test_impact_forecast.py | 59 ++++++++++----------- climada/hazard/forecast.py | 35 +++--------- climada/hazard/test/test_forecast.py | 58 ++++++-------------- 4 files changed, 56 insertions(+), 124 deletions(-) diff --git a/climada/engine/impact_forecast.py b/climada/engine/impact_forecast.py index 4812df6842..55dfbf7033 100644 --- a/climada/engine/impact_forecast.py +++ b/climada/engine/impact_forecast.py @@ -228,16 +228,9 @@ def min(self): ImpactForecast An ImpactForecast object with the min impact matrix and at_event. """ - red_imp_mat = sparse.csr_matrix(self.imp_mat.min(axis=0)) + red_imp_mat = self.imp_mat.min(axis=0).tocsr() red_at_event = np.array([red_imp_mat.sum()]) - reduced_attrs = self._reduce_attrs("min") return ImpactForecast( - lead_time=reduced_attrs["lead_time"], - member=reduced_attrs["member"], - event_id=reduced_attrs["event_id"], - event_name=reduced_attrs["event_name"], - date=reduced_attrs["date"], - frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, coord_exp=self.coord_exp, crs=self.crs, @@ -248,6 +241,7 @@ def min(self): unit=self.unit, imp_mat=red_imp_mat, haz_type=self.haz_type, + **self._reduce_attrs("min"), ) def max(self): @@ -264,16 +258,9 @@ def max(self): ImpactForecast An ImpactForecast object with the max impact matrix and at_event. 
""" - red_imp_mat = sparse.csr_matrix(self.imp_mat.max(axis=0)) + red_imp_mat = self.imp_mat.max(axis=0).tocsr() red_at_event = np.array([red_imp_mat.sum()]) - reduced_attrs = self._reduce_attrs("max") return ImpactForecast( - lead_time=reduced_attrs["lead_time"], - member=reduced_attrs["member"], - event_id=reduced_attrs["event_id"], - event_name=reduced_attrs["event_name"], - date=reduced_attrs["date"], - frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, coord_exp=self.coord_exp, crs=self.crs, @@ -284,6 +271,7 @@ def max(self): unit=self.unit, imp_mat=red_imp_mat, haz_type=self.haz_type, + **self._reduce_attrs("max"), ) def mean(self): @@ -301,14 +289,7 @@ def mean(self): """ red_imp_mat = sparse.csr_matrix(self.imp_mat.mean(axis=0)) red_at_event = np.array([red_imp_mat.sum()]) - reduced_attrs = self._reduce_attrs("mean") return ImpactForecast( - lead_time=reduced_attrs["lead_time"], - member=reduced_attrs["member"], - event_id=reduced_attrs["event_id"], - event_name=reduced_attrs["event_name"], - date=reduced_attrs["date"], - frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, coord_exp=self.coord_exp, crs=self.crs, @@ -319,4 +300,5 @@ def mean(self): unit=self.unit, imp_mat=red_imp_mat, haz_type=self.haz_type, + **self._reduce_attrs("mean"), ) diff --git a/climada/engine/test/test_impact_forecast.py b/climada/engine/test/test_impact_forecast.py index e7d8e32f77..f4544f4b60 100644 --- a/climada/engine/test/test_impact_forecast.py +++ b/climada/engine/test/test_impact_forecast.py @@ -167,41 +167,38 @@ def test_impact_forecast_blocked_methods(impact_forecast): impact_forecast.calc_freq_curve(np.array([10, 50, 100])) -def test_impact_forecast_mean_min_max(impact_forecast): +@pytest.fixture +def impact_forecast_stats(impact_kwargs, lead_time, member): + max_index = 4 + for key, val in impact_kwargs.items(): + if isinstance(val, (np.ndarray, list)): + impact_kwargs[key] = val[:max_index] + elif isinstance(val, csr_matrix): + impact_kwargs[key] = val[:max_index, :] + impact_kwargs["imp_mat"] = csr_matrix([[1, 0], [0, 1], [3, 2], [2, 3]]) + impact_kwargs["at_event"] = np.array([1, 1, 5, 5]) + return ImpactForecast( + lead_time=lead_time[:max_index], member=member[:max_index], **impact_kwargs + ) + + +@pytest.mark.parametrize("attr", ["min", "mean", "max"]) +def test_impact_forecast_min_mean_max(impact_forecast_stats, attr): """Check mean, min, and max methods for ImpactForecast""" - imp_fcst_mean = impact_forecast.mean() - imp_fcst_min = impact_forecast.min() - imp_fcst_max = impact_forecast.max() + imp_fc_reduced = getattr(impact_forecast_stats, attr)() # assert imp_mat npt.assert_array_equal( - imp_fcst_mean.imp_mat.todense(), impact_forecast.imp_mat.todense().mean(axis=0) + imp_fc_reduced.imp_mat.todense(), + getattr(impact_forecast_stats.imp_mat.todense(), attr)(axis=0), ) - npt.assert_array_equal(imp_fcst_min.imp_mat.todense(), np.array([[0, 0]])) - npt.assert_array_equal(imp_fcst_max.imp_mat.todense(), np.array([[31, 31]])) - # assert at_event - npt.assert_array_equal( - imp_fcst_mean.at_event, impact_forecast.at_event.mean() - ) # 134/6 - npt.assert_array_equal(imp_fcst_min.at_event, impact_forecast.at_event.min()) - npt.assert_array_equal(imp_fcst_max.at_event, impact_forecast.at_event.max()) + at_event_expected = {"min": [0], "mean": [3], "max": [6]} + npt.assert_array_equal(imp_fc_reduced.at_event, at_event_expected[attr]) # check that attributes where reduced correctly - assert np.isnat(imp_fcst_mean.lead_time[0]) - assert 
np.isnat(imp_fcst_min.lead_time[0]) - assert np.isnat(imp_fcst_max.lead_time[0]) - assert imp_fcst_mean.member[0] == -1 - assert imp_fcst_min.member[0] == -1 - assert imp_fcst_max.member[0] == -1 - assert imp_fcst_mean.event_name[0] == "mean" - assert imp_fcst_min.event_name[0] == "min" - assert imp_fcst_max.event_name[0] == "max" - assert imp_fcst_mean.event_id[0] == 0 - assert imp_fcst_min.event_id[0] == 0 - assert imp_fcst_max.event_id[0] == 0 - assert imp_fcst_mean.frequency == 1 - assert imp_fcst_min.frequency == 1 - assert imp_fcst_max.frequency == 1 - assert imp_fcst_mean.date == 0 - assert imp_fcst_min.date == 0 - assert imp_fcst_max.date == 0 + npt.assert_array_equal(np.isnat(imp_fc_reduced.lead_time), [True]) + npt.assert_array_equal(imp_fc_reduced.member, [-1]) + npt.assert_array_equal(imp_fc_reduced.event_name, [attr]) + npt.assert_array_equal(imp_fc_reduced.event_id, [0]) + npt.assert_array_equal(imp_fc_reduced.frequency, [1]) + npt.assert_array_equal(imp_fc_reduced.date, [0]) diff --git a/climada/hazard/forecast.py b/climada/hazard/forecast.py index 29e2fe9d0c..614b15059b 100644 --- a/climada/hazard/forecast.py +++ b/climada/hazard/forecast.py @@ -149,24 +149,17 @@ def min(self): HazardForecast A HazardForecast object with the min intensity and fraction. """ - red_intensity = sparse.csr_matrix(self.intensity.min(axis=0)) - red_fraction = sparse.csr_matrix(self.fraction.min(axis=0)) - reduced_attrs = self._reduce_attrs("min") + red_intensity = self.intensity.min(axis=0).tocsr() + red_fraction = self.fraction.min(axis=0).tocsr() return HazardForecast( - lead_time=reduced_attrs["lead_time"], - member=reduced_attrs["member"], haz_type=self.haz_type, pool=self.pool, units=self.units, centroids=self.centroids, - event_id=reduced_attrs["event_id"], - frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, - event_name=reduced_attrs["event_name"], - date=reduced_attrs["date"], - orig=reduced_attrs["orig"], intensity=red_intensity, fraction=red_fraction, + **self._reduce_attrs("min"), ) def max(self): @@ -183,24 +176,17 @@ def max(self): HazardForecast A HazardForecast object with the min intensity and fraction. 
""" - red_intensity = sparse.csr_matrix(self.intensity.max(axis=0)) - red_fraction = sparse.csr_matrix(self.fraction.max(axis=0)) - reduced_attrs = self._reduce_attrs("max") + red_intensity = self.intensity.max(axis=0).tocsr() + red_fraction = self.fraction.max(axis=0).tocsr() return HazardForecast( - lead_time=reduced_attrs["lead_time"], - member=reduced_attrs["member"], haz_type=self.haz_type, pool=self.pool, units=self.units, centroids=self.centroids, - event_id=reduced_attrs["event_id"], - frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, - event_name=reduced_attrs["event_name"], - date=reduced_attrs["date"], - orig=reduced_attrs["orig"], intensity=red_intensity, fraction=red_fraction, + **self._reduce_attrs("max"), ) def mean(self): @@ -218,20 +204,13 @@ def mean(self): """ red_intensity = sparse.csr_matrix(self.intensity.mean(axis=0)) red_fraction = sparse.csr_matrix(self.fraction.mean(axis=0)) - reduced_attrs = self._reduce_attrs("mean") return HazardForecast( - lead_time=reduced_attrs["lead_time"], - member=reduced_attrs["member"], haz_type=self.haz_type, pool=self.pool, units=self.units, centroids=self.centroids, - event_id=reduced_attrs["event_id"], - frequency=reduced_attrs["frequency"], frequency_unit=self.frequency_unit, - event_name=reduced_attrs["event_name"], - date=reduced_attrs["date"], - orig=reduced_attrs["orig"], intensity=red_intensity, fraction=red_fraction, + **self._reduce_attrs("mean"), ) diff --git a/climada/hazard/test/test_forecast.py b/climada/hazard/test/test_forecast.py index 0366043af7..ac5290b0b8 100644 --- a/climada/hazard/test/test_forecast.py +++ b/climada/hazard/test/test_forecast.py @@ -166,52 +166,26 @@ def test_write_read_hazard_forecast(haz_fc, tmp_path): npt.assert_array_equal(haz_fc.__dict__[key], haz_fc_read.__dict__[key]) -def test_hazard_forecast_mean_min_max(haz_fc): +@pytest.mark.parametrize("attr", ["min", "mean", "max"]) +def test_hazard_forecast_mean_min_max(haz_fc, attr): """Check mean, min, and max methods for ImpactForecast""" - haz_fcst_mean = haz_fc.mean() - haz_fcst_min = haz_fc.min() - haz_fcst_max = haz_fc.max() + haz_fcst_reduced = getattr(haz_fc, attr)() - # assert intensity + # Assert sparse matrices npt.assert_array_equal( - haz_fcst_mean.intensity.todense(), haz_fc.intensity.todense().mean(axis=0) + haz_fcst_reduced.intensity.todense(), + getattr(haz_fc.intensity.todense(), attr)(axis=0), ) npt.assert_array_equal( - haz_fcst_min.intensity.todense(), haz_fc.intensity.todense().min(axis=0) - ) - npt.assert_array_equal( - haz_fcst_max.intensity.todense(), haz_fc.intensity.todense().max(axis=0) - ) - # assert fraction - npt.assert_array_equal( - haz_fcst_mean.fraction.todense(), haz_fc.fraction.todense().mean(axis=0) - ) - npt.assert_array_equal( - haz_fcst_min.fraction.todense(), haz_fc.fraction.todense().min(axis=0) - ) - npt.assert_array_equal( - haz_fcst_max.fraction.todense(), haz_fc.fraction.todense().max(axis=0) + haz_fcst_reduced.fraction.todense(), + getattr(haz_fc.fraction.todense(), attr)(axis=0), ) - # check that attributes where reduced correctly - assert np.isnat(haz_fcst_mean.lead_time[0]) - assert np.isnat(haz_fcst_min.lead_time[0]) - assert np.isnat(haz_fcst_max.lead_time[0]) - assert haz_fcst_mean.member[0] == -1 - assert haz_fcst_min.member[0] == -1 - assert haz_fcst_max.member[0] == -1 - assert haz_fcst_mean.event_name[0] == "mean" - assert haz_fcst_min.event_name[0] == "min" - assert haz_fcst_max.event_name[0] == "max" - assert haz_fcst_mean.event_id[0] == 0 - assert 
haz_fcst_min.event_id[0] == 0 - assert haz_fcst_max.event_id[0] == 0 - assert haz_fcst_mean.frequency == 1 - assert haz_fcst_min.frequency == 1 - assert haz_fcst_max.frequency == 1 - assert haz_fcst_mean.date == 0 - assert haz_fcst_min.date == 0 - assert haz_fcst_max.date == 0 - assert np.all(haz_fcst_mean.orig) - assert np.all(haz_fcst_min.orig) - assert np.all(haz_fcst_max.orig) + # Check that attributes where reduced correctly + npt.assert_array_equal(np.isnat(haz_fcst_reduced.lead_time), [True]) + npt.assert_array_equal(haz_fcst_reduced.member, [-1]) + npt.assert_array_equal(haz_fcst_reduced.event_name, [attr]) + npt.assert_array_equal(haz_fcst_reduced.event_id, [0]) + npt.assert_array_equal(haz_fcst_reduced.frequency, [1]) + npt.assert_array_equal(haz_fcst_reduced.date, [0]) + npt.assert_array_equal(haz_fcst_reduced.orig, [True]) From 2c35791b5d4af7135772bfef76cdade1fa5511dc Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 15:14:52 +0100 Subject: [PATCH 29/34] Add reduce_unique_selection in forecast base class --- climada/util/forecast.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/climada/util/forecast.py b/climada/util/forecast.py index 94b9751a8b..e9631e2619 100644 --- a/climada/util/forecast.py +++ b/climada/util/forecast.py @@ -92,3 +92,29 @@ def idx_lead_time(self, lead_time: np.ndarray) -> np.ndarray: """ return np.isin(self.lead_time, lead_time) + + +def reduce_unique_selection(forecast, values, select, reduce_attr): + """ + Reduce an attribute of a forecast object by selecting unique values + and performing an attribute reduction method. + Parameters + ---------- + values : np.ndarray + Array of values for which to select and reduce the attribute. + select : str + Name of the attribute to select on (e.g. 'lead_time', 'member'). + reduce_attr : str + Name of the attribute reduction method to call (e.g. 'min', 'mean'). + Returns + ------- + Forecast + Forecast object with the attribute reduced by the reduction method + and selected by the unique values. + """ + return forecast.concat( + [ + getattr(forecast.select(**{select: [val]}), reduce_attr)(dim=None) + for val in np.unique(values) + ] + ) From c91a2cb4397033ff11d28e1879ae9399480a7ca7 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 15:17:27 +0100 Subject: [PATCH 30/34] Add dim to mean min max in hazard forecast --- climada/hazard/forecast.py | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/climada/hazard/forecast.py b/climada/hazard/forecast.py index abab0d0787..493bfbffcc 100644 --- a/climada/hazard/forecast.py +++ b/climada/hazard/forecast.py @@ -25,7 +25,7 @@ import scipy.sparse as sparse from ..util.checker import size -from ..util.forecast import Forecast +from ..util.forecast import Forecast, reduce_unique_selection from .base import Hazard LOGGER = logging.getLogger(__name__) @@ -135,20 +135,26 @@ def _reduce_attrs(self, event_name: str): return reduced_attrs - def min(self): + def min(self, dim=None): """ Reduce the intensity and fraction of a HazardForecast to the minimum value. Parameters ---------- - None + dim : str | None + Dimension to reduce over. If None, reduce over all data. Returns ------- HazardForecast A HazardForecast object with the min intensity and fraction. 
""" + if dim is not None: + return reduce_unique_selection( + values=getattr(self.dim), select=dim, reduce_attr="min" + ) + red_intensity = self.intensity.min(axis=0).tocsr() red_fraction = self.fraction.min(axis=0).tocsr() return HazardForecast( @@ -162,20 +168,26 @@ def min(self): **self._reduce_attrs("min"), ) - def max(self): + def max(self, dim=None): """ Reduce the intensity and fraction of a HazardForecast to the maximum value. Parameters ---------- - None + dim : str | None + Dimension to reduce over. If None, reduce over all data. Returns ------- HazardForecast A HazardForecast object with the min intensity and fraction. """ + if dim is not None: + return reduce_unique_selection( + values=getattr(self.dim), select=dim, reduce_attr="max" + ) + red_intensity = self.intensity.max(axis=0).tocsr() red_fraction = self.fraction.max(axis=0).tocsr() return HazardForecast( @@ -189,19 +201,25 @@ def max(self): **self._reduce_attrs("max"), ) - def mean(self): + def mean(self, dim=None): """ Reduce the intensity and fraction of a HazardForecast to the mean value. Parameters ---------- - None + dim : str | None + Dimension to reduce over. If None, reduce over all data. Returns ------- HazardForecast A HazardForecast object with the min intensity and fraction. """ + if dim is not None: + return reduce_unique_selection( + values=getattr(self.dim), select=dim, reduce_attr="mean" + ) + red_intensity = sparse.csr_matrix(self.intensity.mean(axis=0)) red_fraction = sparse.csr_matrix(self.fraction.mean(axis=0)) return HazardForecast( From 64574ec24527c83e7bac3696b3b710ec1794d336 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 15:18:16 +0100 Subject: [PATCH 31/34] Draft test mean min max with dim hazard forecat --- climada/hazard/test/test_forecast.py | 59 ++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/climada/hazard/test/test_forecast.py b/climada/hazard/test/test_forecast.py index 99bc1d6d73..f1eaa30c52 100644 --- a/climada/hazard/test/test_forecast.py +++ b/climada/hazard/test/test_forecast.py @@ -258,3 +258,62 @@ def test_hazard_forecast_mean_min_max(haz_fc, attr): npt.assert_array_equal(haz_fcst_reduced.frequency, [1]) npt.assert_array_equal(haz_fcst_reduced.date, [0]) npt.assert_array_equal(haz_fcst_reduced.orig, [True]) + + +def test_hazard_forecast_mean_min_max_dim(haz_fc): + """Check mean, min, and max methods for ImpactForecast with dim argument""" + for attr in ["min", "mean", "max"]: + for dim, unique_vals in zip( + ["member", "lead_time"], + [np.unique(haz_fc.member), np.unique(haz_fc.lead_time)], + ): + haz_fcst_reduced = getattr(haz_fc, attr)(dim=dim) + # Assert sparse matrices + expected_intensity = [] + expected_fraction = [] + for val in unique_vals: + mask = getattr(haz_fc, dim) == val + expected_intensity.append( + getattr(haz_fc.intensity.todense()[mask], attr)(axis=0) + ) + expected_fraction.append( + getattr(haz_fc.fraction.todense()[mask], attr)(axis=0) + ) + npt.assert_array_equal( + haz_fcst_reduced.intensity.todense(), + np.vstack(expected_intensity), + ) + npt.assert_array_equal( + haz_fcst_reduced.fraction.todense(), + np.vstack(expected_fraction), + ) + + # Check that attributes where reduced correctly + if dim == "member": + npt.assert_array_equal(haz_fcst_reduced.member, unique_vals) + npt.assert_array_equal( + haz_fcst_reduced.lead_time, + np.array([np.timedelta64("NaT")] * len(unique_vals)), + ) + else: # dim == "lead_time" + npt.assert_array_equal(haz_fcst_reduced.lead_time, unique_vals) + npt.assert_array_equal( + 
haz_fcst_reduced.member, + np.array([-1] * len(unique_vals)), + ) + npt.assert_array_equal( + haz_fcst_reduced.event_name, + np.array([attr] * len(unique_vals)), + ) + npt.assert_array_equal(haz_fcst_reduced.event_id, [0] * len(unique_vals)) + npt.assert_array_equal(haz_fcst_reduced.frequency, [1] * len(unique_vals)) + npt.assert_array_equal(haz_fcst_reduced.date, [0] * len(unique_vals)) + npt.assert_array_equal(haz_fcst_reduced.orig, [True] * len(unique_vals)) + # TODO add test in case no reduction happens (e.g., all values along dim are unique) + + +def test_hazard_forecast_mean_max_min_dim_error(haz_fc): + """Check mean, min, and max methods for ImpactForecast with dim argument""" + for attr in ["min", "mean", "max"]: + with pytest.raises(ValueError, match="not a valid dimension"): + getattr(haz_fc, attr)(dim="invalid_dim") From 1076719d2fd18833579b4057c10349daffcdd381 Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 16:33:02 +0100 Subject: [PATCH 32/34] Correct wrong use of self.dim in mean min max --- climada/hazard/forecast.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/climada/hazard/forecast.py b/climada/hazard/forecast.py index 493bfbffcc..424f7b8331 100644 --- a/climada/hazard/forecast.py +++ b/climada/hazard/forecast.py @@ -152,7 +152,7 @@ def min(self, dim=None): """ if dim is not None: return reduce_unique_selection( - values=getattr(self.dim), select=dim, reduce_attr="min" + self, values=dim, select=dim, reduce_attr="min" ) red_intensity = self.intensity.min(axis=0).tocsr() @@ -185,7 +185,7 @@ def max(self, dim=None): """ if dim is not None: return reduce_unique_selection( - values=getattr(self.dim), select=dim, reduce_attr="max" + self, values=dim, select=dim, reduce_attr="max" ) red_intensity = self.intensity.max(axis=0).tocsr() @@ -217,7 +217,7 @@ def mean(self, dim=None): """ if dim is not None: return reduce_unique_selection( - values=getattr(self.dim), select=dim, reduce_attr="mean" + self, values=dim, select=dim, reduce_attr="mean" ) red_intensity = sparse.csr_matrix(self.intensity.mean(axis=0)) From bc6f5765ca35a8437658ebffcd925094d69b276d Mon Sep 17 00:00:00 2001 From: luseverin Date: Wed, 10 Dec 2025 16:33:56 +0100 Subject: [PATCH 33/34] Parametrize hazard fixtures with different members and lead times and continue drafting tests --- climada/hazard/test/test_forecast.py | 104 ++++++++++++++------------- 1 file changed, 54 insertions(+), 50 deletions(-) diff --git a/climada/hazard/test/test_forecast.py b/climada/hazard/test/test_forecast.py index f1eaa30c52..b83512c475 100644 --- a/climada/hazard/test/test_forecast.py +++ b/climada/hazard/test/test_forecast.py @@ -30,7 +30,7 @@ from climada.hazard.test.test_base import hazard_kwargs -@pytest.fixture +@pytest.fixture(scope="module") def haz_kwargs(): return hazard_kwargs() @@ -40,14 +40,18 @@ def hazard(haz_kwargs): return Hazard(**haz_kwargs) -@pytest.fixture -def lead_time(haz_kwargs): - return pd.timedelta_range("1h", periods=len(haz_kwargs["event_id"])).to_numpy() +@pytest.fixture(scope="module", params=[1, 2]) +def lead_time(request, haz_kwargs): + base_range = pd.timedelta_range( + "1h", periods=len(haz_kwargs["event_id"]) // request.param + ) + return np.tile(base_range.to_numpy(), request.param) -@pytest.fixture -def member(haz_kwargs): - return np.arange(len(haz_kwargs["event_id"])) +@pytest.fixture(scope="module", params=[1, 2]) +def member(request, haz_kwargs): + base_range = np.arange(len(haz_kwargs["event_id"]) // request.param) + return np.tile(base_range, 
request.param) @pytest.fixture @@ -260,55 +264,55 @@ def test_hazard_forecast_mean_min_max(haz_fc, attr): npt.assert_array_equal(haz_fcst_reduced.orig, [True]) -def test_hazard_forecast_mean_min_max_dim(haz_fc): +@pytest.mark.parametrize("attr", ["min", "mean", "max"]) +def test_hazard_forecast_mean_min_max_member(haz_fc, attr): """Check mean, min, and max methods for ImpactForecast with dim argument""" - for attr in ["min", "mean", "max"]: - for dim, unique_vals in zip( - ["member", "lead_time"], - [np.unique(haz_fc.member), np.unique(haz_fc.lead_time)], - ): - haz_fcst_reduced = getattr(haz_fc, attr)(dim=dim) - # Assert sparse matrices - expected_intensity = [] - expected_fraction = [] - for val in unique_vals: - mask = getattr(haz_fc, dim) == val - expected_intensity.append( - getattr(haz_fc.intensity.todense()[mask], attr)(axis=0) - ) - expected_fraction.append( - getattr(haz_fc.fraction.todense()[mask], attr)(axis=0) - ) - npt.assert_array_equal( - haz_fcst_reduced.intensity.todense(), - np.vstack(expected_intensity), + + for dim, unique_vals in zip( + ["member", "lead_time"], + [np.unique(haz_fc.member), np.unique(haz_fc.lead_time)], + ): + haz_fcst_reduced = getattr(haz_fc, attr)(dim=dim) + # Assert sparse matrices + expected_intensity = [] + expected_fraction = [] + for val in unique_vals: + mask = getattr(haz_fc, dim) == val + expected_intensity.append( + getattr(haz_fc.intensity.todense()[mask], attr)(axis=0) ) + expected_fraction.append( + getattr(haz_fc.fraction.todense()[mask], attr)(axis=0) + ) + npt.assert_array_equal( + haz_fcst_reduced.intensity.todense(), + np.vstack(expected_intensity), + ) + npt.assert_array_equal( + haz_fcst_reduced.fraction.todense(), + np.vstack(expected_fraction), + ) + # Check that attributes where reduced correctly + if dim == "member": + npt.assert_array_equal(haz_fcst_reduced.member, unique_vals) npt.assert_array_equal( - haz_fcst_reduced.fraction.todense(), - np.vstack(expected_fraction), + haz_fcst_reduced.lead_time, + np.array([np.timedelta64("NaT")] * len(unique_vals)), ) - - # Check that attributes where reduced correctly - if dim == "member": - npt.assert_array_equal(haz_fcst_reduced.member, unique_vals) - npt.assert_array_equal( - haz_fcst_reduced.lead_time, - np.array([np.timedelta64("NaT")] * len(unique_vals)), - ) - else: # dim == "lead_time" - npt.assert_array_equal(haz_fcst_reduced.lead_time, unique_vals) - npt.assert_array_equal( - haz_fcst_reduced.member, - np.array([-1] * len(unique_vals)), - ) + else: # dim == "lead_time" + npt.assert_array_equal(haz_fcst_reduced.lead_time, unique_vals) npt.assert_array_equal( - haz_fcst_reduced.event_name, - np.array([attr] * len(unique_vals)), + haz_fcst_reduced.member, + np.array([-1] * len(unique_vals)), ) - npt.assert_array_equal(haz_fcst_reduced.event_id, [0] * len(unique_vals)) - npt.assert_array_equal(haz_fcst_reduced.frequency, [1] * len(unique_vals)) - npt.assert_array_equal(haz_fcst_reduced.date, [0] * len(unique_vals)) - npt.assert_array_equal(haz_fcst_reduced.orig, [True] * len(unique_vals)) + npt.assert_array_equal( + haz_fcst_reduced.event_name, + np.array([attr] * len(unique_vals)), + ) + npt.assert_array_equal(haz_fcst_reduced.event_id, [0] * len(unique_vals)) + npt.assert_array_equal(haz_fcst_reduced.frequency, [1] * len(unique_vals)) + npt.assert_array_equal(haz_fcst_reduced.date, [0] * len(unique_vals)) + npt.assert_array_equal(haz_fcst_reduced.orig, [True] * len(unique_vals)) # TODO add test in case no reduction happens (e.g., all values along dim are unique) From 
5b46f63697ac849ab603577845835969471d936e Mon Sep 17 00:00:00 2001 From: Lukas Riedel <34276446+peanutfun@users.noreply.github.com> Date: Wed, 17 Dec 2025 12:13:57 +0100 Subject: [PATCH 34/34] Adjust reduction logic with respect to members Tests are still failing! --- climada/hazard/forecast.py | 31 ++++++++++++++++----------- climada/hazard/test/test_forecast.py | 32 +++++++++++++++------------- climada/util/forecast.py | 4 ++++ 3 files changed, 40 insertions(+), 27 deletions(-) diff --git a/climada/hazard/forecast.py b/climada/hazard/forecast.py index 97ba768d69..42e1f3b0bf 100644 --- a/climada/hazard/forecast.py +++ b/climada/hazard/forecast.py @@ -106,7 +106,7 @@ def _check_sizes(self): size(exp_len=num_entries, var=self.member, var_name="Forecast.member") size(exp_len=num_entries, var=self.lead_time, var_name="Forecast.lead_time") - def _reduce_attrs(self, event_name: str): + def _reduce_attrs(self, event_name: str, **attrs): """ Reduce the attributes of a HazardForecast to a single value. @@ -123,15 +123,22 @@ def _reduce_attrs(self, event_name: str): event_name : str The event_name given to the reduced data. """ + + def unique_or_default(attr, default): + if len(unique := np.unique(getattr(self, attr))) == 1: + return unique.item(0) + return default + reduced_attrs = { - "lead_time": np.array([np.timedelta64("NaT")]), - "member": np.array([-1]), - "event_id": np.array([0]), - "event_name": np.array([event_name]), - "date": np.array([0]), - "frequency": np.array([1]), - "orig": np.array([True]), - } + "lead_time": unique_or_default("lead_time", np.timedelta64("NaT")), + "member": unique_or_default("member", -1), + "event_id": unique_or_default("event_id", 1), + "event_name": unique_or_default("event_name", event_name), + "date": unique_or_default("date", 0), + "frequency": 1, + "orig": unique_or_default("orig", True), + } | attrs + reduced_attrs = {key: np.array([value]) for key, value in reduced_attrs.items()} return reduced_attrs @@ -152,7 +159,7 @@ def min(self, dim=None): """ if dim is not None: return reduce_unique_selection( - self, values=dim, select=dim, reduce_attr="min" + self, values=getattr(self, dim), select=dim, reduce_attr="min" ) red_intensity = self.intensity.min(axis=0).tocsr() @@ -185,7 +192,7 @@ def max(self, dim=None): """ if dim is not None: return reduce_unique_selection( - self, values=dim, select=dim, reduce_attr="max" + self, values=getattr(self, dim), select=dim, reduce_attr="max" ) red_intensity = self.intensity.max(axis=0).tocsr() @@ -217,7 +224,7 @@ def mean(self, dim=None): """ if dim is not None: return reduce_unique_selection( - self, values=dim, select=dim, reduce_attr="mean" + self, values=getattr(self, dim), select=dim, reduce_attr="mean" ) red_intensity = sparse.csr_matrix(self.intensity.mean(axis=0)) diff --git a/climada/hazard/test/test_forecast.py b/climada/hazard/test/test_forecast.py index 54a17ec040..29fc111d0f 100644 --- a/climada/hazard/test/test_forecast.py +++ b/climada/hazard/test/test_forecast.py @@ -40,17 +40,17 @@ def hazard(haz_kwargs): return Hazard(**haz_kwargs) -@pytest.fixture(scope="module", params=[1, 2]) +# TODO: params should only apply to dimension reduction test +@pytest.fixture def lead_time(request, haz_kwargs): - base_range = pd.timedelta_range( - "1h", periods=len(haz_kwargs["event_id"]) // request.param - ) + return pd.timedelta_range("1h", periods=len(haz_kwargs["event_id"])) return np.tile(base_range.to_numpy(), request.param) -@pytest.fixture(scope="module", params=[1, 2]) +# TODO: params should only apply to 
dimension reduction test +@pytest.fixture def member(request, haz_kwargs): - base_range = np.arange(len(haz_kwargs["event_id"]) // request.param) + return np.arange(len(haz_kwargs["event_id"])) return np.tile(base_range, request.param) @@ -258,7 +258,7 @@ def test_hazard_forecast_mean_min_max(haz_fc, attr): npt.assert_array_equal(np.isnat(haz_fcst_reduced.lead_time), [True]) npt.assert_array_equal(haz_fcst_reduced.member, [-1]) npt.assert_array_equal(haz_fcst_reduced.event_name, [attr]) - npt.assert_array_equal(haz_fcst_reduced.event_id, [0]) + npt.assert_array_equal(haz_fcst_reduced.event_id, [1]) npt.assert_array_equal(haz_fcst_reduced.frequency, [1]) npt.assert_array_equal(haz_fcst_reduced.date, [0]) npt.assert_array_equal(haz_fcst_reduced.orig, [True]) @@ -288,7 +288,7 @@ def test_hazard_forecast_quantile(haz_fc, quantile): npt.assert_array_equal( haz_fcst_quantile.event_name, np.array([f"quantile_{quantile}"]) ) - npt.assert_array_equal(haz_fcst_quantile.event_id, np.array([0])) + npt.assert_array_equal(haz_fcst_quantile.event_id, np.array([1])) npt.assert_array_equal(haz_fcst_quantile.frequency, np.array([1])) npt.assert_array_equal(haz_fcst_quantile.date, np.array([0])) npt.assert_array_equal(haz_fcst_quantile.orig, np.array([True])) @@ -315,7 +315,7 @@ def test_median(haz_fc): # check that attributes where reduced correctly npt.assert_array_equal(haz_fcst_median.member, np.array([-1])) npt.assert_array_equal(haz_fcst_median.lead_time, np.array([np.timedelta64("NaT")])) - npt.assert_array_equal(haz_fcst_median.event_id, np.array([0])) + npt.assert_array_equal(haz_fcst_median.event_id, np.array([1])) npt.assert_array_equal(haz_fcst_median.event_name, np.array(["median"])) npt.assert_array_equal(haz_fcst_median.frequency, np.array([1])) npt.assert_array_equal(haz_fcst_median.date, np.array([0])) @@ -324,7 +324,7 @@ def test_median(haz_fc): @pytest.mark.parametrize("attr", ["min", "mean", "max"]) def test_hazard_forecast_mean_min_max_member(haz_fc, attr): - """Check mean, min, and max methods for ImpactForecast with dim argument""" + """Check mean, min, and max methods for HazardForecast with dim argument""" for dim, unique_vals in zip( ["member", "lead_time"], @@ -351,14 +351,16 @@ def test_hazard_forecast_mean_min_max_member(haz_fc, attr): np.vstack(expected_fraction), ) # Check that attributes where reduced correctly - if dim == "member": - npt.assert_array_equal(haz_fcst_reduced.member, unique_vals) + if dim == "lead_time": + npt.assert_array_equal(haz_fcst_reduced.member, np.unique(haz_fc.member)) npt.assert_array_equal( haz_fcst_reduced.lead_time, np.array([np.timedelta64("NaT")] * len(unique_vals)), ) - else: # dim == "lead_time" - npt.assert_array_equal(haz_fcst_reduced.lead_time, unique_vals) + else: # dim == "member" + npt.assert_array_equal( + haz_fcst_reduced.lead_time, np.unique(haz_fc.lead_time) + ) npt.assert_array_equal( haz_fcst_reduced.member, np.array([-1] * len(unique_vals)), @@ -377,5 +379,5 @@ def test_hazard_forecast_mean_min_max_member(haz_fc, attr): def test_hazard_forecast_mean_max_min_dim_error(haz_fc): """Check mean, min, and max methods for ImpactForecast with dim argument""" for attr in ["min", "mean", "max"]: - with pytest.raises(ValueError, match="not a valid dimension"): + with pytest.raises(AttributeError): getattr(haz_fc, attr)(dim="invalid_dim") diff --git a/climada/util/forecast.py b/climada/util/forecast.py index e9631e2619..a65e3cae14 100644 --- a/climada/util/forecast.py +++ b/climada/util/forecast.py @@ -98,14 +98,18 @@ def 
reduce_unique_selection(forecast, values, select, reduce_attr): """ Reduce an attribute of a forecast object by selecting unique values and performing an attribute reduction method. + Parameters ---------- + forecast : HazardForecast | ImpactForecast + Forecast object to reduce. values : np.ndarray Array of values for which to select and reduce the attribute. select : str Name of the attribute to select on (e.g. 'lead_time', 'member'). reduce_attr : str Name of the attribute reduction method to call (e.g. 'min', 'mean'). + Returns ------- Forecast