Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
140 changes: 140 additions & 0 deletions API/Routes/System/HealthRoute.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,140 @@
from flask import Blueprint, jsonify
import platform
import shutil
import os
import time
import logging
from pathlib import Path
from Classes.Base import Config

# Blueprint exposing the /api/health* endpoints; registered in API/app.py.
health_api = Blueprint('HealthRoute', __name__)

# Basic module-level variables to cache solver check results.
# NOTE: plain module globals, not thread-synchronized — worst case under
# concurrent requests is a redundant re-scan, which is harmless.
_SOLVER_CACHE_DATA = {}   # last payload returned by /api/health/solvers
_SOLVER_CACHE_TIME = 0.0  # time.time() of the last cache refresh
CACHE_TTL = 300 # 5 minutes between solver disk scans


def _check_solver(binary_name, env_var):
    """Check whether a solver binary is reachable on this machine.

    Resolution order mirrors Osemosys._resolve_solver_folder:
    1. Environment variable *env_var* — authoritative when set: if it does
       not resolve to a valid binary, we do NOT fall back to PATH/bundled,
       so a misconfigured override is surfaced instead of silently masked.
    2. System PATH (shutil.which)
    3. Bundled binary under Config.SOLVERs_FOLDER

    Args:
        binary_name: base executable name, e.g. "glpsol" or "cbc".
        env_var: environment variable that may override the binary location
            (either the file itself or its containing directory).

    Returns:
        dict with keys "found" (bool), "source" ("env"|"path"|"bundled"|None)
        and "path" (str|None).
    """
    allowed_names = _binary_names(binary_name)
    allowed_lower = {n.lower() for n in allowed_names}

    # 1 — env var (may point at the binary itself or at its folder)
    env_val = os.environ.get(env_var, "").strip().strip("\"'")
    if env_val:
        env_path = Path(env_val).expanduser()
        # Validate that the file is actually one of the expected binary names
        if env_path.is_file() and env_path.name.lower() in allowed_lower:
            return {"found": True, "source": "env", "path": str(env_path)}
        # directory — look inside
        if env_path.is_dir():
            for name in allowed_names:
                candidate = env_path / name
                if candidate.is_file():
                    return {"found": True, "source": "env", "path": str(candidate)}
        # Env var set but unresolvable: report not-found, no fallback.
        return {"found": False, "source": None, "path": None}

    # 2 — system PATH
    for name in allowed_names:
        which_result = shutil.which(name)
        if which_result:
            return {"found": True, "source": "path", "path": which_result}

    # 3 — bundled under SOLVERs_FOLDER.
    bundled_dir = Config.SOLVERs_FOLDER
    # Cheap shallow probes first: the folder itself plus the known solver
    # subdirectories (GLPK, CBC layouts), so the common case never pays for
    # a recursive scan of a potentially large SOLVERs_FOLDER.
    shallow_dirs = [bundled_dir]
    if binary_name.lower() == "glpsol":
        shallow_dirs.append(bundled_dir / "GLPK")
    elif binary_name.lower() == "cbc":
        shallow_dirs.append(bundled_dir / "COIN-OR")
        shallow_dirs.append(bundled_dir / "cbc")

    for s_dir in shallow_dirs:
        if not s_dir.exists():
            continue
        for name in allowed_names:
            candidate = s_dir / name
            if candidate.is_file():
                return {"found": True, "source": "bundled", "path": str(candidate)}

    # Fall back to ONE recursive scan of the bundled folder.  rglob is lazy,
    # and unlike the earlier "inspect only the first hit" approach this keeps
    # looking past directory entries that merely share the binary's name.
    if bundled_dir.exists():
        for name in allowed_names:
            match = next((p for p in bundled_dir.rglob(name) if p.is_file()), None)
            if match is not None:
                return {"found": True, "source": "bundled", "path": str(match)}

    return {"found": False, "source": None, "path": None}


def _binary_names(binary_name):
"""Return list of possible binary filenames for the current platform."""
names = [binary_name]
if platform.system() == "Windows" and not binary_name.lower().endswith(".exe"):
names.insert(0, binary_name + ".exe")
return names


@health_api.route("/api/health", methods=['GET'])
def healthCheck():
    """Liveness/readiness probe for the Flask backend.

    Reports platform details plus whether DataStorage is a writable
    directory.  Responds 200 with status "ok" when storage is healthy,
    503 with status "error" when it is degraded, and 500 on unexpected
    failures.
    """
    try:
        # Readiness criterion: DataStorage must exist and be writable.
        storage_ok = Config.DATA_STORAGE.is_dir() and os.access(Config.DATA_STORAGE, os.W_OK)

        payload = {
            "status": "ok" if storage_ok else "error",
            "platform": platform.system(),
            "architecture": platform.machine(),
            "python": platform.python_version(),
            "dataStorage": "writable" if storage_ok else "error",
        }
        # Degraded storage is surfaced as 503 so startup guards can tell
        # a healthy backend from a broken one.
        return jsonify(payload), (200 if storage_ok else 503)
    except Exception as e:
        logging.error(f"Health check failed: {e}")
        return jsonify({"status": "error", "message": "Failed to perform system health check"}), 500


@health_api.route("/api/health/solvers", methods=['GET'])
def solverStatus():
    """Report availability of the GLPK and CBC solvers on this machine.

    Results are cached in module-level variables for CACHE_TTL seconds so
    high-frequency polling does not trigger repeated disk scans.
    """
    global _SOLVER_CACHE_DATA, _SOLVER_CACHE_TIME
    try:
        now = time.time()
        # Serve from cache while it is populated and still fresh.
        cache_is_fresh = bool(_SOLVER_CACHE_DATA) and (now - _SOLVER_CACHE_TIME) < CACHE_TTL
        if cache_is_fresh:
            return jsonify(_SOLVER_CACHE_DATA), 200

        glpk_status = _check_solver("glpsol", "SOLVER_GLPK_PATH")
        cbc_status = _check_solver("cbc", "SOLVER_CBC_PATH")

        payload = {
            "glpk": glpk_status,
            "cbc": cbc_status,
            "anyAvailable": glpk_status["found"] or cbc_status["found"],
        }

        _SOLVER_CACHE_DATA, _SOLVER_CACHE_TIME = payload, now

        return jsonify(payload), 200
    except Exception as e:
        logging.error(f"Solver health check failed: {e}")
        return jsonify({"status": "error", "message": "Failed to perform solver health check"}), 500
Empty file added API/Routes/System/__init__.py
Empty file.
2 changes: 2 additions & 0 deletions API/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from Routes.Case.SyncS3Route import syncs3_api
from Routes.Case.ViewDataRoute import viewdata_api
from Routes.DataFile.DataFileRoute import datafile_api
from Routes.System.HealthRoute import health_api

#RADI
# -------------------------
Expand Down Expand Up @@ -63,6 +64,7 @@
app.register_blueprint(viewdata_api)
app.register_blueprint(datafile_api)
app.register_blueprint(syncs3_api)
app.register_blueprint(health_api)

CORS(app)

Expand Down
139 changes: 139 additions & 0 deletions tests/test_health.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
"""Tests for the /api/health and /api/health/solvers endpoints."""

import json
import sys
from pathlib import Path
from unittest.mock import patch

import pytest

# ---------------------------------------------------------------------------
# Make sure the API package is importable when running from the repo root.
# The Flask app expects to run from inside API/, so we add that to sys.path
# the same way the start scripts do.
# ---------------------------------------------------------------------------
API_DIR = Path(__file__).resolve().parents[1] / "API"
if str(API_DIR) not in sys.path:
sys.path.insert(0, str(API_DIR))

from app import app # noqa: E402
from Routes.System import HealthRoute # noqa: E402


@pytest.fixture
def client():
    """Yield a Flask test client with testing mode enabled."""
    app.config["TESTING"] = True
    with app.test_client() as test_client:
        yield test_client


@pytest.fixture(autouse=True)
def reset_health_cache(monkeypatch):
    """Ensure every test runs on a clean environment/cache.

    Copilot Review point: Ensure solver env vars are cleared so tests
    deterministically use the mocked shutil.which or bundled paths.
    """
    # Wipe the module-level solver cache so no state leaks between tests.
    monkeypatch.setattr(HealthRoute, "_SOLVER_CACHE_DATA", {})
    monkeypatch.setattr(HealthRoute, "_SOLVER_CACHE_TIME", 0.0)
    # Clear solver override env vars so they cannot hijack resolution order.
    for env_name in ("SOLVER_GLPK_PATH", "SOLVER_CBC_PATH"):
        monkeypatch.delenv(env_name, raising=False)


# ── /api/health ──────────────────────────────────────────────────────────────

class TestHealthEndpoint:
    """Behavioral tests for the /api/health liveness/readiness endpoint."""

    def test_returns_200_when_healthy(self, client):
        assert client.get("/api/health").status_code == 200

    def test_returns_503_when_storage_unwritable(self, client):
        """Copilot Review point: Return non-200 when storage is degraded."""
        import tempfile
        with tempfile.TemporaryDirectory() as tmpdir:
            with patch("Routes.System.HealthRoute.Config") as mock_cfg:
                # Directory exists, but os.access is forced to report it
                # as unwritable — the simplest way to simulate degradation.
                mock_cfg.DATA_STORAGE = Path(tmpdir)
                with patch("Routes.System.HealthRoute.os.access", return_value=False):
                    resp = client.get("/api/health")

        assert resp.status_code == 503
        payload = json.loads(resp.data)
        assert payload["status"] == "error"
        assert payload["dataStorage"] == "error"

    def test_response_contains_required_fields(self, client):
        payload = json.loads(client.get("/api/health").data)
        assert payload["status"] == "ok"
        for field in ("platform", "python", "architecture", "dataStorage"):
            assert field in payload

    def test_python_version_looks_valid(self, client):
        payload = json.loads(client.get("/api/health").data)
        # should be something like "3.11.9"
        major, *rest = payload["python"].split(".")
        assert len(rest) >= 1
        assert int(major) >= 3


# ── /api/health/solvers ──────────────────────────────────────────────────────

class TestSolverStatusEndpoint:
    """Behavioral tests for the /api/health/solvers endpoint."""

    def test_returns_200(self, client):
        assert client.get("/api/health/solvers").status_code == 200

    def test_response_has_solver_keys(self, client):
        payload = json.loads(client.get("/api/health/solvers").data)
        for key in ("glpk", "cbc", "anyAvailable"):
            assert key in payload

    def test_solver_entry_shape(self, client):
        """Each solver entry should have found/source/path keys."""
        payload = json.loads(client.get("/api/health/solvers").data)
        for solver_key in ("glpk", "cbc"):
            entry = payload[solver_key]
            for field in ("found", "source", "path"):
                assert field in entry

    @patch("Routes.System.HealthRoute.shutil.which")
    def test_glpk_found_on_path(self, mock_which, client):
        """When glpsol is on PATH, glpk should report found=True."""
        mock_which.side_effect = (
            lambda name: "/usr/bin/glpsol" if name in ("glpsol", "glpsol.exe") else None
        )

        payload = json.loads(client.get("/api/health/solvers").data)
        assert payload["glpk"]["found"] is True
        assert payload["glpk"]["source"] == "path"

    @patch("Routes.System.HealthRoute.shutil.which", return_value=None)
    def test_no_solvers_reports_false(self, mock_which, client):
        """When no solver is found anywhere, anyAvailable should be False."""
        import tempfile
        # Point the bundled-solver scan at an empty temp dir so it finds nothing.
        with tempfile.TemporaryDirectory() as tmpdir:
            with patch("Routes.System.HealthRoute.Config") as mock_cfg:
                mock_cfg.SOLVERs_FOLDER = Path(tmpdir)
                mock_cfg.DATA_STORAGE = Path(tmpdir)
                resp = client.get("/api/health/solvers")
        payload = json.loads(resp.data)
        assert payload["anyAvailable"] is False
Loading