From b3d63b10fe95b0616196bcad7b8900a7aaa3e721 Mon Sep 17 00:00:00 2001
From: FIXME
Date: Tue, 23 Dec 2025 08:11:38 -0800
Subject: [PATCH 1/3] Add closed index handling parameters to ES queries

Add CLOSED_INDEX_PARAMS constant and _create_search helper method to
automatically apply ignore_unavailable, allow_no_indices, and
expand_wildcards parameters to all ES search queries. This prevents
index_closed_exception errors when ISM policies close old indices.

Generated with Claude Code

Co-Authored-By: Claude
---
 pele/lib/query.py | 29 ++++++++++++++++++++---------
 1 file changed, 20 insertions(+), 9 deletions(-)

diff --git a/pele/lib/query.py b/pele/lib/query.py
index ca3ee5f..82f6ce7 100644
--- a/pele/lib/query.py
+++ b/pele/lib/query.py
@@ -50,6 +50,13 @@ def get_page_size_and_offset(r):
 class QueryES(object):
     """Class for querying ES backend."""
 
+    # Parameters for handling closed indices
+    CLOSED_INDEX_PARAMS = {
+        "ignore_unavailable": True,
+        "allow_no_indices": True,
+        "expand_wildcards": "open"
+    }
+
     def __init__(self, es_client, Search, Q, A):  # noqa
         """
         :param es_client: the object returned from Elasticsearch(...)
@@ -62,6 +69,10 @@ def __init__(self, es_client, Search, Q, A):  # noqa
         self.Q = Q
         self.A = A
 
+    def _create_search(self, index):
+        """Create a Search object with closed index handling parameters."""
+        return self.Search(using=self.client, index=index).params(**self.CLOSED_INDEX_PARAMS)
+
     def query_types(self, index, offset, page_size):
         """Return list of dataset types:
         {
@@ -80,7 +91,7 @@ def query_types(self, index, offset, page_size):
             }
         }
         """
-        s = self.Search(using=self.client, index=index).extra(size=0)
+        s = self._create_search(index).extra(size=0)
         a = self.A('terms', field='dataset_type.keyword', size=MAX_SIZE)
         s.aggs.bucket('types', a)
 
@@ -107,7 +118,7 @@ def query_datasets(self, index, offset, page_size):
             }
         }
         """
-        s = self.Search(using=self.client, index=index).extra(size=0)
+        s = self._create_search(index).extra(size=0)
         a = self.A('terms', field='dataset.keyword', size=MAX_SIZE)
         s.aggs.bucket('datasets', a)
 
@@ -136,7 +147,7 @@ def query_datasets_by_type(self, index, dataset_type, offset, page_size):
             }
         }
         """
-        s = self.Search(using=self.client, index=index).extra(size=0)
+        s = self._create_search(index).extra(size=0)
         q = self.Q('term', dataset_type__keyword=dataset_type)
         a = self.A('terms', field='dataset.keyword', size=MAX_SIZE)
         s = s.query(q)
@@ -167,7 +178,7 @@ def query_types_by_dataset(self, index, dataset, offset, page_size):
             }
         }
         """
-        s = self.Search(using=self.client, index=index).extra(size=0)
+        s = self._create_search(index).extra(size=0)
         q = self.Q('term', dataset__keyword=dataset)
         a = self.A('terms', field='dataset_type.keyword', size=MAX_SIZE)
         s = s.query(q)
@@ -201,7 +212,7 @@ def query_ids_by_dataset(self, index, dataset, offset, page_size, start_time=Non
 
         :return: Elasticsearch document
         """
-        s = self.Search(using=self.client, index=index).query(self.Q('term', dataset__keyword=dataset))
+        s = self._create_search(index).query(self.Q('term', dataset__keyword=dataset))
         if start_time is not None:
             s = s.query('range', **{'starttime': {'gte': start_time}})
         if end_time is not None:
@@ -243,7 +254,7 @@ def query_ids_by_type(self, index, dataset_type, offset, page_size, start_time=N
 
         :return: Elasticsearch document
         """
-        s = self.Search(using=self.client, index=index).query(self.Q('term', dataset_type__keyword=dataset_type))
+        s = self._create_search(index).query(self.Q('term', dataset_type__keyword=dataset_type))
         if start_time is not None:
             s = s.query('range', **{'starttime': {'gte': start_time}})
         if end_time is not None:
@@ -276,7 +287,7 @@ def query_id(self, index, _id):
             ]
         }
         """
-        s = self.Search(using=self.client, index=index).query(self.Q('term', _id=_id))
+        s = self._create_search(index).query(self.Q('term', _id=_id))
         current_app.logger.debug(s.to_dict())
         resp = s.execute()
         return resp[0].to_dict() if s.count() > 0 else None
@@ -325,7 +336,7 @@ def query_fields(self, index, terms, fields, offset, page_size, start_time=None,
             else:
                 q += self.Q('term', **{f: val})
 
-        s = self.Search(using=self.client, index=index).query(q)
+        s = self._create_search(index).query(q)
         if start_time is not None:
             s = s.query('range', **{'starttime': {'gte': start_time}})
         if end_time is not None:
@@ -453,7 +464,7 @@ def overlaps(self, index, _id, terms, fields, offset, page_size):
         f = self.Q('geo_shape', **{'location': {'shape': location}})
 
         # search
-        s = self.Search(using=self.client, index=index)
+        s = self._create_search(index)
         if t is not None:
             s = s.query(t)
         if q != self.Q():

From d7d7989f80d85501c4479811a27e122d17be1989 Mon Sep 17 00:00:00 2001
From: pymonger
Date: Tue, 23 Dec 2025 09:07:45 -0800
Subject: [PATCH 2/3] bump version

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 8a73eb2..a13809c 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name='pele',
-    version='1.2.1',
+    version='1.3.0',
     long_description='REST API for HySDS Datasets',
     packages=find_packages(),
     include_package_data=True,

From a844b221e5998f08756d0f7c1bff2a704a76fb3f Mon Sep 17 00:00:00 2001
From: pymonger
Date: Tue, 23 Dec 2025 09:09:21 -0800
Subject: [PATCH 3/3] bump version

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index a13809c..7777083 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name='pele',
-    version='1.3.0',
+    version='1.4.0',
     long_description='REST API for HySDS Datasets',
     packages=find_packages(),
     include_package_data=True,
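
Note (not part of the patches above): a minimal standalone sketch of the behavior that PATCH 1/3's _create_search helper relies on. elasticsearch-dsl's Search.params() forwards keyword arguments to the underlying search request, so closed or missing indices matched by a wildcard pattern are skipped instead of raising index_closed_exception. The cluster URL and index pattern below are placeholders for illustration, not values taken from pele.

    from elasticsearch import Elasticsearch
    from elasticsearch_dsl import Search

    # Placeholder cluster URL and index pattern; substitute your own.
    client = Elasticsearch("http://localhost:9200")

    # Same parameters the patch collects in QueryES.CLOSED_INDEX_PARAMS:
    # skip unavailable (e.g. closed) indices, tolerate patterns that match
    # nothing, and expand wildcards to open indices only.
    s = Search(using=client, index="grq_*").params(
        ignore_unavailable=True,
        allow_no_indices=True,
        expand_wildcards="open",
    ).extra(size=0)

    resp = s.execute()
    print(resp.hits.total)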